From 71c9a82e93816915b42cf145208dbec4ace45956 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 11:31:19 +0100
Subject: [PATCH 001/218] chore(actions): changing paths

---
 .github/workflows/utilities-ee.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
index f9a1ac677..92270dc3d 100644
--- a/.github/workflows/utilities-ee.yaml
+++ b/.github/workflows/utilities-ee.yaml
@@ -6,6 +6,8 @@ on:
       - dev
     paths:
       - "ee/utilities/**"
+      - "!ee/utilities/.gitignore"
+      - "!ee/utilities/*-dev.sh"
       - "utilities/*/**"
       - "!utilities/.gitignore"
       - "!utilities/*-dev.sh"

From 553955ccec7d8591375ec207deb800800bbf359a Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 11:31:48 +0100
Subject: [PATCH 002/218] chore(actions): changing branches

---
 .github/workflows/utilities-ee.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
index 92270dc3d..d28f72968 100644
--- a/.github/workflows/utilities-ee.yaml
+++ b/.github/workflows/utilities-ee.yaml
@@ -3,7 +3,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - dev
+      - api-v1.10.0
     paths:
       - "ee/utilities/**"
       - "!ee/utilities/.gitignore"

From 804a3cfc6addff3ff2b6dc6a3da8da3ad4bf6af9 Mon Sep 17 00:00:00 2001
From: rjshrjndrn
Date: Thu, 16 Feb 2023 14:46:03 +0100
Subject: [PATCH 003/218] ci(actions): Adding assist GH actions

Signed-off-by: rjshrjndrn
---
 .github/workflows/assist-ee.yaml    | 120 ++++++++++++++++++++++++++++
 .github/workflows/assist.yaml       | 120 ++++++++++++++++++++++++++++
 .github/workflows/utilities-ee.yaml |  71 ----------------
 .github/workflows/utilities.yaml    |  68 ----------------
 4 files changed, 240 insertions(+), 139 deletions(-)
 create mode 100644 .github/workflows/assist-ee.yaml
 create mode 100644 .github/workflows/assist.yaml
 delete mode 100644 .github/workflows/utilities-ee.yaml
 delete mode 100644 .github/workflows/utilities.yaml

diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml
new file mode 100644
index 000000000..78a783dd1
--- /dev/null
+++ b/.github/workflows/assist-ee.yaml
@@ -0,0 +1,120 @@
+# This action will push the assist changes to aws
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - "ee/utilities/**"
+      - "utilities/*/**"
+      - "!utilities/.gitignore"
+      - "!utilities/*-dev.sh"
+
+name: Build and Deploy Assist EE
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # We need to diff with old commit
+          # to see which workers got changed.
+          fetch-depth: 2
+
+      - name: Docker login
+        run: |
+          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
+
+      - uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
+        id: setcontext
+
+      - name: Building and Pushing Assist image
+        id: build-image
+        env:
+          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          ENVIRONMENT: staging
+        run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
+          cd utilities
+          PUSH_IMAGE=0 bash -x ./build.sh ee
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("assist")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("assist")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              # We have to strip off the -ee suffix, as helm will append it.
+              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+          EOF
+          done
+      - name: Deploy to kubernetes
+        run: |
+          cd scripts/helmcharts/
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/assist/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
+        env:
+          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+          # We're not passing the -ee flag, because helm will add that.
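+          # Reviewer note, not part of the original commit: with sample values,
+          # a push to branch "dev" at sha "abc1234" makes IMAGE_TAG resolve to
+          # "dev_abc1234"; the -ee suffix is appended by the helm chart instead.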
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          ENVIRONMENT: staging
+
+      # - name: Debug Job
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+      #   env:
+      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+      #     IMAGE_TAG: ${{ github.sha }}
+      #     ENVIRONMENT: staging
+      #

diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml
new file mode 100644
index 000000000..cf4d184cf
--- /dev/null
+++ b/.github/workflows/assist.yaml
@@ -0,0 +1,120 @@
+# This action will push the assist changes to aws
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - "ee/utilities/**"
+      - "utilities/*/**"
+      - "!utilities/.gitignore"
+      - "!utilities/*-dev.sh"
+
+name: Build and Deploy Assist EE
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # We need to diff with old commit
+          # to see which workers got changed.
+          fetch-depth: 2
+
+      - name: Docker login
+        run: |
+          docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
+
+      - uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
+        id: setcontext
+
+      - name: Building and Pushing Assist image
+        id: build-image
+        env:
+          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          ENVIRONMENT: staging
+        run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
+          cd utilities
+          PUSH_IMAGE=0 bash -x ./build.sh ee
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("assist")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("assist")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              # We have to strip off the -ee suffix, as helm will append it.
+              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+          EOF
+          done
+      - name: Deploy to kubernetes
+        run: |
+          cd scripts/helmcharts/
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.OSS_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/assist/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
+        env:
+          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+          # We're not passing the -ee flag, because helm will add that.
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          ENVIRONMENT: staging
+
+      # - name: Debug Job
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+      #   env:
+      #     DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+      #     IMAGE_TAG: ${{ github.sha }}
+      #     ENVIRONMENT: staging
+      #

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
deleted file mode 100644
index d28f72968..000000000
--- a/.github/workflows/utilities-ee.yaml
+++ /dev/null
@@ -1,71 +0,0 @@
-# This action will push the assist changes to aws
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - api-v1.10.0
-    paths:
-      - "ee/utilities/**"
-      - "!ee/utilities/.gitignore"
-      - "!ee/utilities/*-dev.sh"
-      - "utilities/*/**"
-      - "!utilities/.gitignore"
-      - "!utilities/*-dev.sh"
-
-name: Build and Deploy Assist EE
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # We need to diff with old commit
-          # to see which workers got changed.
-          fetch-depth: 2
-
-      - name: Docker login
-        run: |
-          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
-
-      - uses: azure/k8s-set-context@v1
-        with:
-          method: kubeconfig
-          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
-        id: setcontext
-
-      - name: Building and Pushing api image
-        id: build-image
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
-          ENVIRONMENT: staging
-        run: |
-          cd utilities
-          PUSH_IMAGE=1 bash build.sh ee
-      - name: Deploy to kubernetes
-        run: |
-          cd scripts/helmcharts/
-          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${EE_KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app utilities
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-
-      # - name: Debug Job
-      #   if: ${{ failure() }}
-      #   uses: mxschmitt/action-tmate@v3
-      #   env:
-      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-      #     IMAGE_TAG: ${{ github.sha }}
-      #     ENVIRONMENT: staging
-      #

diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml
deleted file mode 100644
index 7d2792d9b..000000000
--- a/.github/workflows/utilities.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-# This action will push the assist changes to aws
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - api-v1.10.0
-    paths:
-      - "utilities/**"
-      - "!utilities/.gitignore"
-      - "!utilities/*-dev.sh"
-
-name: Build and Deploy Assist
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # We need to diff with old commit
-          # to see which workers got changed.
-          fetch-depth: 2
-
-      - name: Docker login
-        run: |
-          docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
-
-      - uses: azure/k8s-set-context@v1
-        with:
-          method: kubeconfig
-          kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
-        id: setcontext
-
-      - name: Building and Pushing api image
-        id: build-image
-        env:
-          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-        run: |
-          cd utilities
-          PUSH_IMAGE=1 bash build.sh
-      - name: Deploy to kubernetes
-        run: |
-          cd scripts/helmcharts/
-          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app utilities
-        env:
-          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-
-      # - name: Debug Job
-      #   if: ${{ failure() }}
-      #   uses: mxschmitt/action-tmate@v3
-      #   env:
-      #     DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-      #     IMAGE_TAG: ${{ github.sha }}
-      #     ENVIRONMENT: staging
-      #

From 3d31bab0600d3fbf5b2dd6e8b723c94412f59508 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 17:11:25 +0100
Subject: [PATCH 004/218] feat(assist): support missing protocol

---
 ee/utilities/servers/websocket-cluster.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 6aa2bade5..77cfd5067 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -24,7 +24,7 @@ const {
 const {createAdapter} = require("@socket.io/redis-adapter");
 const {createClient} = require("redis");
 const wsRouter = express.Router();
-const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
+const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
 const pubClient = createClient({url: REDIS_URL});
 const subClient = pubClient.duplicate();
 console.log(`Using Redis: ${REDIS_URL}`);

From 161a84b86d3ab8b6053aa2ff36fd7c84dce0a929 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 18:25:58 +0100
Subject: [PATCH 005/218] feat(assist): upgrade changes

---
 ee/utilities/.gitignore                   | 1 -
 ee/utilities/run-dev.sh                   | 6 ++++++
 ee/utilities/servers/websocket-cluster.js | 5 +++--
 ee/utilities/servers/websocket.js         | 2 +-
 utilities/servers/websocket.js            | 2 +-
 5 files changed, 11 insertions(+), 5 deletions(-)
 create mode 100755 ee/utilities/run-dev.sh

diff --git a/ee/utilities/.gitignore b/ee/utilities/.gitignore
index 8c9dca279..cd68b1ffb 100644
--- a/ee/utilities/.gitignore
+++ b/ee/utilities/.gitignore
@@ -15,5 +15,4 @@ servers/sourcemaps-server.js
 /utils/helper.js
 /utils/assistHelper.js
 .local
-run-dev.sh
 *.mmdb

diff --git a/ee/utilities/run-dev.sh b/ee/utilities/run-dev.sh
new file mode 100755
index 000000000..00e8d5a4b
--- /dev/null
+++ b/ee/utilities/run-dev.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -a
+source .env
+set +a
+
+npm start
\ No newline at end of file

diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 77cfd5067..fef572a52 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -283,7 +283,7 @@ module.exports = {
     wsRouter,
     start: (server, prefix) => {
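         // Reviewer note, not part of the patch: a quick sketch of how the
         // REDIS_URL normalization from PATCH 004 behaves (sample values only):
         //   "localhost:6379"      -> "redis://localhost:6379"
         //   "//localhost:6379"    -> "redis://localhost:6379"
         //   "rediss://cache:6379" -> "redis://cache:6379" (any scheme is rewritten)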
         createSocketIOServer(server, prefix);
-        io.use(async (socket, next) => await authorizer.check(socket, next));
+        // io.use(async (socket, next) => await authorizer.check(socket, next));
         io.on('connection', async (socket) => {
             socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
             debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
@@ -309,7 +309,8 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            await io.of('/').adapter.remoteJoin(socket.id, socket.peerId);
+            // await io.of('/').adapter.join(socket.id, socket.peerId);
+            await socket.join(socket.peerId);
             let rooms = await io.of('/').adapter.allRooms();
             if (rooms.has(socket.peerId)) {
                 let connectedSockets = await io.in(socket.peerId).fetchSockets();

diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js
index bf65789f2..c906b5987 100644
--- a/ee/utilities/servers/websocket.js
+++ b/ee/utilities/servers/websocket.js
@@ -287,7 +287,7 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            socket.join(socket.peerId);
+            await socket.join(socket.peerId);
             if (io.sockets.adapter.rooms.get(socket.peerId)) {
                 debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
             }

diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js
index 615390996..f5d029bc2 100644
--- a/utilities/servers/websocket.js
+++ b/utilities/servers/websocket.js
@@ -268,7 +268,7 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            socket.join(socket.peerId);
+            await socket.join(socket.peerId);
             if (io.sockets.adapter.rooms.get(socket.peerId)) {
                 debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
             }

From f158596e5e5e0f50c52f5bba69c5fcbb5a6f7ba2 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 11:31:19 +0100
Subject: [PATCH 006/218] chore(actions): changing paths

---
 .github/workflows/utilities-ee.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
index f9a1ac677..92270dc3d 100644
--- a/.github/workflows/utilities-ee.yaml
+++ b/.github/workflows/utilities-ee.yaml
@@ -6,6 +6,8 @@ on:
       - dev
     paths:
       - "ee/utilities/**"
+      - "!ee/utilities/.gitignore"
+      - "!ee/utilities/*-dev.sh"
       - "utilities/*/**"
       - "!utilities/.gitignore"
      - "!utilities/*-dev.sh"

From 1721683eb89b8e8ab47e9499a8d2b37bd1fbc487 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 11:31:48 +0100
Subject: [PATCH 007/218] chore(actions): changing branches

---
 .github/workflows/utilities-ee.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
index 92270dc3d..d28f72968 100644
--- a/.github/workflows/utilities-ee.yaml
+++ b/.github/workflows/utilities-ee.yaml
@@ -3,7 +3,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - dev
+      - api-v1.10.0
     paths:
       - "ee/utilities/**"
       - "!ee/utilities/.gitignore"

From 15f4f0e45beac0a3159c84b81c6595237164bf24 Mon Sep 17 00:00:00 2001
From: rjshrjndrn
Date: Thu, 16 Feb 2023 14:46:03 +0100
Subject: [PATCH 008/218] ci(actions): Adding assist GH actions

Signed-off-by: rjshrjndrn
---
 .github/workflows/assist-ee.yaml    | 120 ++++++++++++++++++++++++++++
 .github/workflows/assist.yaml       | 120 ++++++++++++++++++++++++++++
 .github/workflows/utilities-ee.yaml |  71 ----------------
 .github/workflows/utilities.yaml    |  68 ----------------
 4 files changed, 240 insertions(+), 139 deletions(-)
 create mode 100644 .github/workflows/assist-ee.yaml
 create mode 100644 .github/workflows/assist.yaml
 delete mode 100644 .github/workflows/utilities-ee.yaml
 delete mode 100644 .github/workflows/utilities.yaml

diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml
new file mode 100644
index 000000000..78a783dd1
--- /dev/null
+++ b/.github/workflows/assist-ee.yaml
@@ -0,0 +1,120 @@
+# This action will push the assist changes to aws
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - "ee/utilities/**"
+      - "utilities/*/**"
+      - "!utilities/.gitignore"
+      - "!utilities/*-dev.sh"
+
+name: Build and Deploy Assist EE
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # We need to diff with old commit
+          # to see which workers got changed.
+          fetch-depth: 2
+
+      - name: Docker login
+        run: |
+          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
+
+      - uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
+        id: setcontext
+
+      - name: Building and Pushing Assist image
+        id: build-image
+        env:
+          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          ENVIRONMENT: staging
+        run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
+          cd utilities
+          PUSH_IMAGE=0 bash -x ./build.sh ee
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("assist")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("assist")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              # We have to strip off the -ee suffix, as helm will append it.
+              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+          EOF
+          done
+      - name: Deploy to kubernetes
+        run: |
+          cd scripts/helmcharts/
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/assist/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
+        env:
+          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+          # We're not passing the -ee flag, because helm will add that.
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          ENVIRONMENT: staging
+
+      # - name: Debug Job
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+      #   env:
+      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+      #     IMAGE_TAG: ${{ github.sha }}
+      #     ENVIRONMENT: staging
+      #

diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml
new file mode 100644
index 000000000..cf4d184cf
--- /dev/null
+++ b/.github/workflows/assist.yaml
@@ -0,0 +1,120 @@
+# This action will push the assist changes to aws
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - "ee/utilities/**"
+      - "utilities/*/**"
+      - "!utilities/.gitignore"
+      - "!utilities/*-dev.sh"
+
+name: Build and Deploy Assist EE
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # We need to diff with old commit
+          # to see which workers got changed.
+          fetch-depth: 2
+
+      - name: Docker login
+        run: |
+          docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
+
+      - uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
+        id: setcontext
+
+      - name: Building and Pushing Assist image
+        id: build-image
+        env:
+          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          ENVIRONMENT: staging
+        run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
+          cd utilities
+          PUSH_IMAGE=0 bash -x ./build.sh ee
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("assist")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
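+            # Reviewer note, not part of the original commit: err_code captures
+            # the exit status of the last trivy scan; any non-zero value aborts
+            # the job below, so nothing is pushed when HIGH/CRITICAL issues exist.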
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("assist")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              # We have to strip off the -ee suffix, as helm will append it.
+              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+          EOF
+          done
+      - name: Deploy to kubernetes
+        run: |
+          cd scripts/helmcharts/
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.OSS_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/assist/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
+        env:
+          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+          # We're not passing the -ee flag, because helm will add that.
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          ENVIRONMENT: staging
+
+      # - name: Debug Job
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+      #   env:
+      #     DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+      #     IMAGE_TAG: ${{ github.sha }}
+      #     ENVIRONMENT: staging
+      #

diff --git a/.github/workflows/utilities-ee.yaml b/.github/workflows/utilities-ee.yaml
deleted file mode 100644
index d28f72968..000000000
--- a/.github/workflows/utilities-ee.yaml
+++ /dev/null
@@ -1,71 +0,0 @@
-# This action will push the assist changes to aws
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - api-v1.10.0
-    paths:
-      - "ee/utilities/**"
-      - "!ee/utilities/.gitignore"
-      - "!ee/utilities/*-dev.sh"
-      - "utilities/*/**"
-      - "!utilities/.gitignore"
-      - "!utilities/*-dev.sh"
-
-name: Build and Deploy Assist EE
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # We need to diff with old commit
-          # to see which workers got changed.
-          fetch-depth: 2
-
-      - name: Docker login
-        run: |
-          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
-
-      - uses: azure/k8s-set-context@v1
-        with:
-          method: kubeconfig
-          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
-        id: setcontext
-
-      - name: Building and Pushing api image
-        id: build-image
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
-          ENVIRONMENT: staging
-        run: |
-          cd utilities
-          PUSH_IMAGE=1 bash build.sh ee
-      - name: Deploy to kubernetes
-        run: |
-          cd scripts/helmcharts/
-          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${EE_KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app utilities
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-
-      # - name: Debug Job
-      #   if: ${{ failure() }}
-      #   uses: mxschmitt/action-tmate@v3
-      #   env:
-      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-      #     IMAGE_TAG: ${{ github.sha }}
-      #     ENVIRONMENT: staging
-      #

diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml
deleted file mode 100644
index 7d2792d9b..000000000
--- a/.github/workflows/utilities.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-# This action will push the assist changes to aws
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - api-v1.10.0
-    paths:
-      - "utilities/**"
-      - "!utilities/.gitignore"
-      - "!utilities/*-dev.sh"
-
-name: Build and Deploy Assist
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # We need to diff with old commit
-          # to see which workers got changed.
-          fetch-depth: 2
-
-      - name: Docker login
-        run: |
-          docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
-
-      - uses: azure/k8s-set-context@v1
-        with:
-          method: kubeconfig
-          kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
-        id: setcontext
-
-      - name: Building and Pushing api image
-        id: build-image
-        env:
-          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-        run: |
-          cd utilities
-          PUSH_IMAGE=1 bash build.sh
-      - name: Deploy to kubernetes
-        run: |
-          cd scripts/helmcharts/
-          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app utilities
-        env:
-          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
-
-      # - name: Debug Job
-      #   if: ${{ failure() }}
-      #   uses: mxschmitt/action-tmate@v3
-      #   env:
-      #     DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-      #     IMAGE_TAG: ${{ github.sha }}
-      #     ENVIRONMENT: staging
-      #

From a52adbd4f509ff8bf27cbf3ef683e0099a773aed Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 17:11:25 +0100
Subject: [PATCH 009/218] feat(assist): support missing protocol

---
 ee/utilities/servers/websocket-cluster.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 6aa2bade5..77cfd5067 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -24,7 +24,7 @@ const {
 const {createAdapter} = require("@socket.io/redis-adapter");
 const {createClient} = require("redis");
 const wsRouter = express.Router();
-const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
+const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
 const pubClient = createClient({url: REDIS_URL});
 const subClient = pubClient.duplicate();
 console.log(`Using Redis: ${REDIS_URL}`);

From 80007f45aee7dc343ef39d081f1d489f02f5e5ee Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 18:25:58 +0100
Subject: [PATCH 010/218] feat(assist): upgrade changes

---
 ee/utilities/.gitignore                   | 1 -
 ee/utilities/run-dev.sh                   | 6 ++++++
 ee/utilities/servers/websocket-cluster.js | 5 +++--
 ee/utilities/servers/websocket.js         | 2 +-
 utilities/servers/websocket.js            | 2 +-
 5 files changed, 11 insertions(+), 5 deletions(-)
 create mode 100755 ee/utilities/run-dev.sh

diff --git a/ee/utilities/.gitignore b/ee/utilities/.gitignore
index 8c9dca279..cd68b1ffb 100644
--- a/ee/utilities/.gitignore
+++ b/ee/utilities/.gitignore
@@ -15,5 +15,4 @@ servers/sourcemaps-server.js
 /utils/helper.js
 /utils/assistHelper.js
 .local
-run-dev.sh
 *.mmdb

diff --git a/ee/utilities/run-dev.sh b/ee/utilities/run-dev.sh
new file mode 100755
index 000000000..00e8d5a4b
--- /dev/null
+++ b/ee/utilities/run-dev.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -a
+source .env
+set +a
+
+npm start
\ No newline at end of file

diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 77cfd5067..fef572a52 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -283,7 +283,7 @@ module.exports = {
     wsRouter,
     start: (server, prefix) => {
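         // Reviewer note, not part of the patch: a hedged sketch of the change
         // below — with socket.io v4 plus the redis adapter, a plain local call
         //   await socket.join(socket.peerId);
         // is sufficient, since the adapter shares room membership across nodes;
         // the explicit io.of('/').adapter.remoteJoin(...) call becomes redundant.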
         createSocketIOServer(server, prefix);
-        io.use(async (socket, next) => await authorizer.check(socket, next));
+        // io.use(async (socket, next) => await authorizer.check(socket, next));
         io.on('connection', async (socket) => {
             socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
             debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
@@ -309,7 +309,8 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            await io.of('/').adapter.remoteJoin(socket.id, socket.peerId);
+            // await io.of('/').adapter.join(socket.id, socket.peerId);
+            await socket.join(socket.peerId);
             let rooms = await io.of('/').adapter.allRooms();
             if (rooms.has(socket.peerId)) {
                 let connectedSockets = await io.in(socket.peerId).fetchSockets();

diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js
index bf65789f2..c906b5987 100644
--- a/ee/utilities/servers/websocket.js
+++ b/ee/utilities/servers/websocket.js
@@ -287,7 +287,7 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            socket.join(socket.peerId);
+            await socket.join(socket.peerId);
             if (io.sockets.adapter.rooms.get(socket.peerId)) {
                 debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
             }

diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js
index 615390996..f5d029bc2 100644
--- a/utilities/servers/websocket.js
+++ b/utilities/servers/websocket.js
@@ -268,7 +268,7 @@
                 debug && console.log(`notifying new agent about no SESSIONS`);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
             }
-            socket.join(socket.peerId);
+            await socket.join(socket.peerId);
             if (io.sockets.adapter.rooms.get(socket.peerId)) {
                 debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
             }

From ffdb16d8995e43649f410034b644aaf11d4cd48c Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 16 Feb 2023 18:58:32 +0100
Subject: [PATCH 011/218] feat(assist): upgrade changes

---
 ee/utilities/servers/websocket-cluster.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index fef572a52..e129bfcb6 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -283,7 +283,7 @@ module.exports = {
     wsRouter,
     start: (server, prefix) => {
         createSocketIOServer(server, prefix);
-        // io.use(async (socket, next) => await authorizer.check(socket, next));
+        io.use(async (socket, next) => await authorizer.check(socket, next));
         io.on('connection', async (socket) => {
             socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
             debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);

From a9659823a705bf26fc3283dc5ea19d15bc0ae385 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 17 Feb 2023 13:04:59 +0100
Subject: [PATCH 012/218] fix(ui) - seriesName in alert list item

---
 .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
index e3412bdce..78f2aa24f 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
@@ -127,7 +127,7 @@ function AlertListItem(props: Props) {
           {'When the '}
           {alert.detectionMethod}
           {' of '}
-          {alert.seriesName}
+          {alert.seriesName || alert.query.left}
           {' is '}
           {alert.query.operator}

From a5bab0a438217ec253be430777b6bf8e36f4f38c Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 13:10:34 +0100
Subject: [PATCH 013/218] feat(alerts): fixed no-events-join builder

---
 api/app_alerts.py                           |  7 +++++++
 api/chalicelib/core/alerts_processor.py     | 19 ++++++++++---------
 api/run-alerts-dev.sh                       |  3 +++
 ee/api/chalicelib/core/alerts_processor.py  | 16 +++++++++-------
 .../chalicelib/core/alerts_processor_exp.py | 10 ++++++++--
 5 files changed, 37 insertions(+), 18 deletions(-)
 create mode 100755 api/run-alerts-dev.sh

diff --git a/api/app_alerts.py b/api/app_alerts.py
index 7107423de..111bad2a1 100644
--- a/api/app_alerts.py
+++ b/api/app_alerts.py
@@ -53,3 +53,10 @@ async def stop_server():
     await shutdown()
     import os, signal
     os.kill(1, signal.SIGTERM)
+
+
+if config("LOCAL_DEV", default=False, cast=bool):
+    @app.get('/private/trigger', tags=["private"])
+    async def trigger_main_cron():
+        logging.info("Triggering main cron")
+        alerts_processor.process()

diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py
--- a/api/chalicelib/core/alerts_processor.py
+++ b/api/chalicelib/core/alerts_processor.py
@@ -49,10 +49,12 @@ LeftToDb = {
     schemas.AlertColumn.errors__4xx_5xx__count: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(session_id)", "condition": "status/100!=2"},
-    schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
-                                             "formula": "COUNT(session_id)", "condition": "status/100=4"},
-    schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
-                                             "formula": "COUNT(session_id)", "condition": "status/100=5"},
+    schemas.AlertColumn.errors__4xx__count: {
+        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+        "formula": "COUNT(session_id)", "condition": "status/100=4"},
+    schemas.AlertColumn.errors__5xx__count: {
+        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+        "formula": "COUNT(session_id)", "condition": "status/100=5"},
     schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
@@ -95,7 +97,7 @@ def can_check(a) -> bool:
             a["options"].get("lastNotification") is None
             or a["options"]["lastNotification"] <= 0
             or ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
-               and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
+           and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
@@ -119,7 +121,7 @@ def Build(a):
     subQ = f"""SELECT {colDef["formula"]} AS value
                 FROM {colDef["table"]}
                 WHERE project_id = %(project_id)s
-                    {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
+                    {"AND " + colDef["condition"] if colDef.get("condition") else ""}"""
     j_s = colDef.get("joinSessions", True)
     main_table = colDef["table"]
     is_ss = main_table == "public.sessions"
@@ -142,8 +144,7 @@ def Build(a):
                   "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
                   "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
     else:
-        sub1 = f"""{subQ} AND timestamp>=%(startDate)s
-                          AND timestamp<=%(now)s
+        sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
                           {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
         params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
         sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
                           {"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
@@ -206,7 +207,7 @@ def process():
                     cur = cur.recreate(rollback=True)
             if len(notifications) > 0:
                 cur.execute(
-                    cur.mogrify(f"""UPDATE public.Alerts
+                    cur.mogrify(f"""UPDATE public.alerts
                                     SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
                                     WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
     if len(notifications) > 0:

diff --git a/api/run-alerts-dev.sh b/api/run-alerts-dev.sh
new file mode 100755
index 000000000..54db30171
--- /dev/null
+++ b/api/run-alerts-dev.sh
@@ -0,0 +1,3 @@
+#!/bin/zsh
+
+uvicorn app_alerts:app --reload
\ No newline at end of file

diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py
index 69a0f7f5f..06663336c 100644
--- a/ee/api/chalicelib/core/alerts_processor.py
+++ b/ee/api/chalicelib/core/alerts_processor.py
@@ -54,10 +54,12 @@ LeftToDb = {
     schemas.AlertColumn.errors__4xx_5xx__count: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(session_id)", "condition": "status/100!=2"},
-    schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
-                                             "formula": "COUNT(session_id)", "condition": "status/100=4"},
-    schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
-                                             "formula": "COUNT(session_id)", "condition": "status/100=5"},
+    schemas.AlertColumn.errors__4xx__count: {
+        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+        "formula": "COUNT(session_id)", "condition": "status/100=4"},
+    schemas.AlertColumn.errors__5xx__count: {
+        "table": "events.resources INNER JOIN public.sessions USING(session_id)",
+        "formula": "COUNT(session_id)", "condition": "status/100=5"},
     schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
@@ -100,7 +102,7 @@ def can_check(a) -> bool:
             a["options"].get("lastNotification") is None
             or a["options"]["lastNotification"] <= 0
             or ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
-               and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
+           and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
@@ -124,7 +126,7 @@ def Build(a):
     subQ = f"""SELECT {colDef["formula"]} AS value
                 FROM {colDef["table"]}
                 WHERE project_id = %(project_id)s
-                    {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
+                    {"AND " + colDef["condition"] if colDef.get("condition") else ""}"""
     j_s = colDef.get("joinSessions", True)
     main_table = colDef["table"]
     is_ss = main_table == "public.sessions"
@@ -211,7 +213,7 @@ def process():
                     cur = cur.recreate(rollback=True)
             if len(notifications) > 0:
                 cur.execute(
-                    cur.mogrify(f"""UPDATE public.Alerts
+                    cur.mogrify(f"""UPDATE public.alerts
                                     SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
                                     WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
     if len(notifications) > 0:

diff --git a/ee/api/chalicelib/core/alerts_processor_exp.py b/ee/api/chalicelib/core/alerts_processor_exp.py
index 7a300654c..0d8b7753c 100644
--- a/ee/api/chalicelib/core/alerts_processor_exp.py
+++ b/ee/api/chalicelib/core/alerts_processor_exp.py
@@ -135,7 +135,7 @@ def Build(a):
                 FROM {colDef["table"](now)}
                 WHERE project_id = %(project_id)s
                 {"AND event_type=%(event_type)s" if params["event_type"] else ""}
-                {"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
+                {"AND " + colDef["condition"] if colDef.get("condition") else ""}"""

     q = f"""SELECT coalesce(value,0) AS value,
                    coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""

@@ -200,7 +200,13 @@ def process():
             if alerts_processor.can_check(alert):
                 logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
                 query, params = Build(alert)
-                query = ch_cur.format(query, params)
+                try:
+                    query = ch_cur.format(query, params)
+                except Exception as e:
+                    logging.error(
+                        f"!!!Error while building alert query for alertId:{alert['alertId']} name: {alert['name']}")
+                    logging.error(e)
+                    continue
                 logging.debug(alert)
                 logging.debug(query)
                 try:

From 5d94e72da2836abdcde01bab06aefd03851b6333 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 17 Feb 2023 12:57:29 +0100
Subject: [PATCH 014/218] fix(ui) - alerts list pagination reset

---
 .../Dashboard/components/Alerts/AlertsList.tsx |  4 ++--
 .../Dashboard/components/Alerts/AlertsView.tsx | 16 +++++++++++++++-
 frontend/app/mstore/alertsStore.ts             |  6 ++++++
 3 files changed, 23 insertions(+), 3 deletions(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx
index e4005098e..d1d4c84ef 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx
@@ -17,10 +17,10 @@ function AlertsList({ siteId }: Props) {
   const { alertsStore, settingsStore } = useStore();
   const { fetchWebhooks, webhooks } = settingsStore
   const { alerts: alertsList, alertsSearch, fetchList, init } = alertsStore
+  const page = alertsStore.page;
   React.useEffect(() => { fetchList(); fetchWebhooks() }, []);
   const alertsArray = alertsList
-  const [page, setPage] = React.useState(1);
   const filteredAlerts = filterList(alertsArray, alertsSearch, ['name'], (item, query) => query.test(item.query.left))
   const list = alertsSearch !== '' ? filteredAlerts : alertsArray;
@@ -59,7 +59,7 @@ function AlertsList({ siteId }: Props) {
-          onPageChange={(page) => setPage(page)}
+          onPageChange={(page) => alertsStore.updateKey('page', page)}
           limit={pageSize}
           debounceRequest={100}

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
index 631df8e43..544c86f8f 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
@@ -1,16 +1,30 @@
-import React from 'react';
+import React, { useEffect } from 'react';
 import { Button, PageTitle, Icon, Link } from 'UI';
 import withPageTitle from 'HOCs/withPageTitle';
 import { withSiteId, alertCreate } from 'App/routes';
 import AlertsList from './AlertsList';
 import AlertsSearch from './AlertsSearch';
+import { useHistory } from 'react-router';
+import { useStore } from 'App/mstore';

 interface IAlertsView {
   siteId: string;
 }

 function AlertsView({ siteId }: IAlertsView) {
+  const history = useHistory();
+  const { alertsStore } = useStore();
+
+
+  useEffect(() => {
+    const unmount = history.listen((location) => {
+      if (!location.pathname.includes('/alert')) {
+        alertsStore.updateKey('page', 1);
+      }
+    });
+    return unmount;
+  }, [history]);
   return (

diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts
index d377af81e..e608c1873 100644
--- a/frontend/app/mstore/alertsStore.ts
+++ b/frontend/app/mstore/alertsStore.ts
@@ -9,6 +9,7 @@ export default class AlertsStore {
   // @ts-ignore
   instance: Alert = new Alert({}, false);
   loading = false
+  page: number = 1;

   constructor() {
     makeAutoObservable(this);
@@ -16,6 +17,11 @@ export default class AlertsStore {

   changeSearch = (value: string) => {
     this.alertsSearch = value;
+    this.page = 1;
+  }
+
+  updateKey(key: string, value: any) {
+    this[key] = value
   }

   fetchList = async () => {

From 1028e2f9482024203920372521dc3655c0a03516 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 17 Feb 2023 13:04:59 +0100
Subject: [PATCH 015/218] fix(ui) - seriesName in alert list item

---
 .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
index e3412bdce..78f2aa24f 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
@@ -127,7 +127,7 @@ function AlertListItem(props: Props) {
           {'When the '}
           {alert.detectionMethod}
           {' of '}
-          {alert.seriesName}
+          {alert.seriesName || alert.query.left}
           {' is '}
           {alert.query.operator}
           {numberWithCommas(alert.query.right)} {alert.metric?.unit}
           {' over the past '}
           {getThreshold(

From b226f2bbb934cf999ccfb9fc8278900404a560e4 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 13:24:41 +0100
Subject: [PATCH 016/218] feat(alerts): changes

---
 ee/api/chalicelib/core/alerts_processor_exp.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ee/api/chalicelib/core/alerts_processor_exp.py b/ee/api/chalicelib/core/alerts_processor_exp.py
index 0d8b7753c..37a1b843f 100644
--- a/ee/api/chalicelib/core/alerts_processor_exp.py
+++ b/ee/api/chalicelib/core/alerts_processor_exp.py
@@ -198,7 +198,6 @@ def process():
             if alert["query"]["left"] != "CUSTOM":
                 continue
             if alerts_processor.can_check(alert):
-                logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
                 query, params = Build(alert)
                 try:
                     query = ch_cur.format(query, params)

From 5fff5cbad5ee8c59ab703358ea575ed174e9839b Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 15:24:13 +0100
Subject: [PATCH 017/218] feat(alerts): changes feat(chalice): changes

---
 ee/api/chalicelib/core/__init__.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/ee/api/chalicelib/core/__init__.py b/ee/api/chalicelib/core/__init__.py
index 64529b782..62723d0f1 100644
--- a/ee/api/chalicelib/core/__init__.py
+++ b/ee/api/chalicelib/core/__init__.py
@@ -6,41 +6,41 @@ logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))

 from . import sessions as sessions_legacy
 if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
-    print(">>> Using experimental sessions search")
+    logging.info(">>> Using experimental sessions search")
     from . import sessions_exp as sessions
 else:
     from . import sessions as sessions

 if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
-    print(">>> Using experimental autocomplete")
+    logging.info(">>> Using experimental autocomplete")
     from . import autocomplete_exp as autocomplete
 else:
     from . import autocomplete as autocomplete

 if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
-    print(">>> Using experimental error search")
+    logging.info(">>> Using experimental error search")
     from . import errors as errors_legacy
     from . import errors_exp as errors

     if config("EXP_ERRORS_GET", cast=bool, default=False):
-        print(">>> Using experimental error get")
+        logging.info(">>> Using experimental error get")
 else:
     from . import errors as errors

 if config("EXP_METRICS", cast=bool, default=False):
-    print(">>> Using experimental metrics")
+    logging.info(">>> Using experimental metrics")
     from . import metrics_exp as metrics
 else:
     from . import metrics as metrics

 if config("EXP_ALERTS", cast=bool, default=False):
-    print(">>> Using experimental alerts")
+    logging.info(">>> Using experimental alerts")
     from . import alerts_processor_exp as alerts_processor
 else:
     from . import alerts_processor as alerts_processor

 if config("EXP_FUNNELS", cast=bool, default=False):
-    print(">>> Using experimental funnels")
+    logging.info(">>> Using experimental funnels")
     if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
         from . import sessions as sessions_legacy
@@ -49,4 +49,4 @@ else:
     from . import significance as significance

 if config("EXP_RESOURCES", cast=bool, default=False):
-    print(">>> Using experimental resources for session-replay")
+    logging.info(">>> Using experimental resources for session-replay")

From d5b5b56ba1b02126380700234e50435a3625c0a0 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 17 Feb 2023 15:24:33 +0100
Subject: [PATCH 018/218] change(ui) - show percentage based on trigger option

---
 .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
index 78f2aa24f..acef2a71c 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
@@ -131,7 +131,7 @@ function AlertListItem(props: Props) {
           {' is '}
           {alert.query.operator}
-          {numberWithCommas(alert.query.right)} {alert.metric?.unit}
+          {numberWithCommas(alert.query.right)} {alert.change === 'percent' ? '%' : alert.metric?.unit}
           {' over the past '}
           {getThreshold(

From a8c2539ffdf881cff7924ac86086a78de0ca4283 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Fri, 17 Feb 2023 15:29:07 +0100
Subject: [PATCH 019/218] change(ui) - unit space

---
 .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
index acef2a71c..024cc734c 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx
@@ -131,7 +131,7 @@ function AlertListItem(props: Props) {
           {' is '}
           {alert.query.operator}
-          {numberWithCommas(alert.query.right)} {alert.change === 'percent' ? '%' : alert.metric?.unit}
+          {numberWithCommas(alert.query.right)}{alert.change === 'percent' ? '%' : alert.metric?.unit}
           {' over the past '}
           {getThreshold(

From 0fdcefe6e9455d0999b35ac4ee0ae29bb5fa7fca Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 15:38:56 +0100
Subject: [PATCH 020/218] chore(actions): changes

---
 .github/workflows/assist.yaml | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml
index cf4d184cf..c599d5cbd 100644
--- a/.github/workflows/assist.yaml
+++ b/.github/workflows/assist.yaml
@@ -5,12 +5,11 @@ on:
     branches:
       - dev
     paths:
-      - "ee/utilities/**"
       - "utilities/*/**"
       - "!utilities/.gitignore"
       - "!utilities/*-dev.sh"

-name: Build and Deploy Assist EE
+name: Build and Deploy Assist

 jobs:
   deploy:
@@ -21,7 +20,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v2
         with:
-          # We need to diff with old commit
+          # We need to diff with old commit
           # to see which workers got changed.
           fetch-depth: 2

@@ -39,12 +38,12 @@ jobs:
         id: build-image
         env:
           DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
           ENVIRONMENT: staging
         run: |
           skip_security_checks=${{ github.event.inputs.skip_security_checks }}
           cd utilities
-          PUSH_IMAGE=0 bash -x ./build.sh ee
+          PUSH_IMAGE=0 bash -x ./build.sh
           [[ "x$skip_security_checks" == "xtrue" ]] || {
             curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
             images=("assist")

From f8b8db3332ce561706f8399d2acd9b36a210b87f Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 15:55:40 +0100
Subject: [PATCH 021/218] feat(alerts): fixed exp-alerts with legacy-sessions-search

---
 ee/api/chalicelib/core/alerts_processor_exp.py             | 3 ++-
 ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 1 -
 scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql    | 1 -
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/ee/api/chalicelib/core/alerts_processor_exp.py b/ee/api/chalicelib/core/alerts_processor_exp.py
index 37a1b843f..310c6faa9 100644
--- a/ee/api/chalicelib/core/alerts_processor_exp.py
+++ b/ee/api/chalicelib/core/alerts_processor_exp.py
@@ -4,9 +4,10 @@ from decouple import config

 import schemas
 from chalicelib.core import alerts_listener, alerts_processor
-from chalicelib.core import sessions, alerts
+from chalicelib.core import alerts
 from chalicelib.utils import pg_client, ch_client, exp_ch_helper
 from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.core import sessions_exp as sessions

 logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))

diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
index 30961fc88..2dd8815cc 100644
--- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
+++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
@@ -138,7 +138,6 @@ ALTER TABLE IF EXISTS projects
     ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;

 -- To migrate saved search data
--- SET client_min_messages TO NOTICE;
 -- SET client_min_messages TO NOTICE;

 CREATE OR REPLACE FUNCTION get_new_event_key(key text)

diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
index 76d6dd88b..8b5ee748f 100644
--- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
+++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
@@ -112,7 +112,6 @@ ALTER TABLE IF EXISTS projects
     ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;

 -- To migrate saved search data
--- SET client_min_messages TO NOTICE;
 -- SET client_min_messages TO NOTICE;

 CREATE OR REPLACE FUNCTION get_new_event_key(key text)

From cf781d14171fe20b2dc7c5e131f5f1bf7da3dae7 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 16:48:34 +0100
Subject: [PATCH 022/218] feat(chalice): filters-events manual-split

---
 api/schemas.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/schemas.py b/api/schemas.py
index ab057426a..683c05943 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -750,7 +750,8 @@ class SessionsSearchPayloadSchema(_PaginatedSchema):

 class FlatSessionsSearch(BaseModel):
     events: Optional[List[_SessionSearchEventSchema]] = Field([])
-    filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])
+    # filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])
+    filters: List[SessionSearchFilterSchema] = Field([])

     @root_validator(pre=True)
     def flat_to_original(cls, values):

From c153e321db421a8f2610ee48224953565dcd5522 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 17:09:17 +0100
Subject: [PATCH 023/218] feat(chalice): filters-events un-manual-split

---
 api/schemas.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/api/schemas.py b/api/schemas.py
index 683c05943..ab057426a 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -750,8 +750,7 @@ class SessionsSearchPayloadSchema(_PaginatedSchema):

 class FlatSessionsSearch(BaseModel):
     events: Optional[List[_SessionSearchEventSchema]] = Field([])
-    # filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])
-    filters: List[SessionSearchFilterSchema] = Field([])
+    filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])

     @root_validator(pre=True)
     def flat_to_original(cls, values):

From 8ee5839c1e8f57b9eaef2381223a2fb0c844a70c Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 17 Feb 2023 18:11:27 +0100
Subject: [PATCH 024/218] feat(DB): migrate metric_series to new format

---
 .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 101 +++++++++++++++++-
 .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 101 +++++++++++++++++-
 2 files changed, 200 insertions(+), 2 deletions(-)

diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
index 2dd8815cc..68b115e46 100644
--- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
+++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql
@@ -139,7 +139,7 @@ ALTER TABLE IF EXISTS projects
     ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;

 -- To migrate saved search data
--- SET client_min_messages TO NOTICE;
+SET client_min_messages TO NOTICE;

 CREATE OR REPLACE FUNCTION get_new_event_key(key text)
     RETURNS text AS
 $$
@@ -325,6 +325,105 @@ $$
 $$ LANGUAGE plpgsql;

+
+-- To migrate saved metric_series data
+DO
+$$
+    DECLARE
+        row RECORD;
+        events_att JSONB;
+        event_filters_att JSONB;
+        filters_att JSONB;
+        element JSONB;
+        s_element JSONB;
+        new_value TEXT;
+        new_events JSONB[];
+        new_filters JSONB[];
+        new_event_filters JSONB[];
+        changed BOOLEAN;
+        planned_update JSONB[];
+    BEGIN
+        planned_update := '{}'::jsonb[];
+        FOR row IN SELECT * FROM metric_series
+            LOOP
+                -- Transform events attributes
+                events_att := row.filter -> 'events';
+                IF events_att IS NOT NULL THEN
+                    new_events := '{}'::jsonb[];
+                    FOR element IN SELECT jsonb_array_elements(events_att)
LOOP + changed := FALSE; + new_value := get_new_event_key(element ->> 'type'); + if new_value IS NOT NULL THEN + changed := TRUE; + new_value := replace(new_value, '"', ''); + element := element || jsonb_build_object('type', new_value); + END IF; + -- Transform event's sub-filters attributes + event_filters_att := element -> 'filters'; + new_event_filters := '{}'::jsonb[]; + IF event_filters_att IS NOT NULL AND jsonb_array_length(event_filters_att) > 0 THEN + FOR s_element IN SELECT jsonb_array_elements(event_filters_att) + LOOP + new_value := get_new_event_filter_key(s_element ->> 'type'); + if new_value IS NOT NULL THEN + changed := TRUE; + new_value := replace(new_value, '"', ''); + s_element := s_element || jsonb_build_object('type', new_value); + new_event_filters := array_append(new_event_filters, s_element); + END IF; + END LOOP; + element := element || jsonb_build_object('filters', new_event_filters); + END IF; + IF changed THEN + new_events := array_append(new_events, element); + END IF; + END LOOP; + IF array_length(new_events, 1) > 0 THEN + row.filter := row.filter || jsonb_build_object('events', new_events); + END IF; + END IF; + + -- Transform filters attributes + filters_att := row.filter -> 'filters'; + IF filters_att IS NOT NULL THEN + new_filters := '{}'::jsonb; + FOR element IN SELECT jsonb_array_elements(filters_att) + LOOP + new_value := get_new_filter_key(element ->> 'type'); + if new_value IS NOT NULL THEN + new_value := replace(new_value, '"', ''); + element := element || jsonb_build_object('type', new_value); + new_filters := array_append(new_filters, element); + END IF; + END LOOP; + IF array_length(new_filters, 1) > 0 THEN + row.filter := row.filter || jsonb_build_object('filters', new_filters); + END IF; + END IF; + + IF array_length(new_events, 1) > 0 OR array_length(new_filters, 1) > 0 THEN + planned_update := array_append(planned_update, + jsonb_build_object('id', row.series_id, 'change', row.filter)); + END IF; + END LOOP; + + -- Update metric_series + IF array_length(planned_update, 1) > 0 THEN + raise notice 'must update % elements',array_length(planned_update, 1); + + UPDATE metric_series + SET filter=changes.change -> 'change' + FROM (SELECT unnest(planned_update)) AS changes(change) + WHERE series_id = (changes.change -> 'id')::integer; + raise notice 'update done'; + ELSE + raise notice 'nothing to update'; + END IF; + END ; +$$ +LANGUAGE plpgsql; + DROP FUNCTION get_new_filter_key; DROP FUNCTION get_new_event_filter_key; DROP FUNCTION get_new_event_key; diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 8b5ee748f..b9f0380c4 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -113,7 +113,7 @@ ALTER TABLE IF EXISTS projects -- To migrate saved search data --- SET client_min_messages TO NOTICE; +SET client_min_messages TO NOTICE; CREATE OR REPLACE FUNCTION get_new_event_key(key text) RETURNS text AS $$ @@ -299,6 +299,105 @@ $$ $$ LANGUAGE plpgsql; + +-- To migrate saved metric_series data +DO +$$ + DECLARE + row RECORD; + events_att JSONB; + event_filters_att JSONB; + filters_att JSONB; + element JSONB; + s_element JSONB; + new_value TEXT; + new_events JSONB[]; + new_filters JSONB[]; + new_event_filters JSONB[]; + changed BOOLEAN; + planned_update JSONB[]; + BEGIN + planned_update := '{}'::jsonb[]; + FOR row IN SELECT * FROM metric_series + LOOP + -- Transform events attributes + events_att := 
row.filter -> 'events'; + IF events_att IS NOT NULL THEN + new_events := '{}'::jsonb[]; + FOR element IN SELECT jsonb_array_elements(events_att) + LOOP + changed := FALSE; + new_value := get_new_event_key(element ->> 'type'); + if new_value IS NOT NULL THEN + changed := TRUE; + new_value := replace(new_value, '"', ''); + element := element || jsonb_build_object('type', new_value); + END IF; + -- Transform event's sub-filters attributes + event_filters_att := element -> 'filters'; + new_event_filters := '{}'::jsonb[]; + IF event_filters_att IS NOT NULL AND jsonb_array_length(event_filters_att) > 0 THEN + FOR s_element IN SELECT jsonb_array_elements(event_filters_att) + LOOP + new_value := get_new_event_filter_key(s_element ->> 'type'); + if new_value IS NOT NULL THEN + changed := TRUE; + new_value := replace(new_value, '"', ''); + s_element := s_element || jsonb_build_object('type', new_value); + new_event_filters := array_append(new_event_filters, s_element); + END IF; + END LOOP; + element := element || jsonb_build_object('filters', new_event_filters); + END IF; + IF changed THEN + new_events := array_append(new_events, element); + END IF; + END LOOP; + IF array_length(new_events, 1) > 0 THEN + row.filter := row.filter || jsonb_build_object('events', new_events); + END IF; + END IF; + + -- Transform filters attributes + filters_att := row.filter -> 'filters'; + IF filters_att IS NOT NULL THEN + new_filters := '{}'::jsonb; + FOR element IN SELECT jsonb_array_elements(filters_att) + LOOP + new_value := get_new_filter_key(element ->> 'type'); + if new_value IS NOT NULL THEN + new_value := replace(new_value, '"', ''); + element := element || jsonb_build_object('type', new_value); + new_filters := array_append(new_filters, element); + END IF; + END LOOP; + IF array_length(new_filters, 1) > 0 THEN + row.filter := row.filter || jsonb_build_object('filters', new_filters); + END IF; + END IF; + + IF array_length(new_events, 1) > 0 OR array_length(new_filters, 1) > 0 THEN + planned_update := array_append(planned_update, + jsonb_build_object('id', row.series_id, 'change', row.filter)); + END IF; + END LOOP; + + -- Update metric_series + IF array_length(planned_update, 1) > 0 THEN + raise notice 'must update % elements',array_length(planned_update, 1); + + UPDATE metric_series + SET filter=changes.change -> 'change' + FROM (SELECT unnest(planned_update)) AS changes(change) + WHERE series_id = (changes.change -> 'id')::integer; + raise notice 'update done'; + ELSE + raise notice 'nothing to update'; + END IF; + END ; +$$ +LANGUAGE plpgsql; + DROP FUNCTION get_new_filter_key; DROP FUNCTION get_new_event_filter_key; DROP FUNCTION get_new_event_key; From 2b0f95cbbb2a6f5cde1042874b7faea51074060e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 10:35:57 +0100 Subject: [PATCH 025/218] fix(ui) - search filters update --- .../Filters/FilterSource/FilterSource.tsx | 6 ---- .../shared/SessionSearch/SessionSearch.tsx | 28 ++++++++++++++----- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index eed1e6e1d..08c93d8df 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -1,7 +1,6 @@ import { FilterType } from 'App/types/filter/filterType'; import React, { useState, useEffect } from 'react'; import stl from './FilterSource.module.css'; 
-import { debounce } from 'App/utils'; import cn from 'classnames'; interface Props { @@ -11,16 +10,11 @@ interface Props { function FilterSource(props: Props) { const { filter } = props; const [value, setValue] = useState(filter.source[0] || ''); - const debounceUpdate: any = React.useCallback(debounce(props.onUpdate, 1000), [props.onUpdate]); useEffect(() => { setValue(filter.source[0] || ''); }, [filter]); - useEffect(() => { - debounceUpdate({ ...filter, source: [value] }); - }, [value]); - const write = ({ target: { value, name } }: any) => setValue(value); const renderFiled = () => { diff --git a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx index 48856d929..84fb770a8 100644 --- a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx +++ b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx @@ -1,24 +1,32 @@ -import React from 'react'; +import React, { useEffect } from 'react'; import FilterList from 'Shared/Filters/FilterList'; import FilterSelection from 'Shared/Filters/FilterSelection'; import SaveFilterButton from 'Shared/SaveFilterButton'; import { connect } from 'react-redux'; import { Button } from 'UI'; -import { edit, addFilter } from 'Duck/search'; +import { edit, addFilter, fetchSessions, updateFilter } from 'Duck/search'; import SessionSearchQueryParamHandler from 'Shared/SessionSearchQueryParamHandler'; +import { debounce } from 'App/utils'; + +let debounceFetch: any = () => {} + interface Props { appliedFilter: any; edit: typeof edit; addFilter: typeof addFilter; saveRequestPayloads: boolean; metaLoading?: boolean + fetchSessions: typeof fetchSessions; + updateFilter: typeof updateFilter; } function SessionSearch(props: Props) { const { appliedFilter, saveRequestPayloads = false, metaLoading } = props; const hasEvents = appliedFilter.filters.filter((i: any) => i.isEvent).size > 0; const hasFilters = appliedFilter.filters.filter((i: any) => !i.isEvent).size > 0; - + useEffect(() => { + debounceFetch = debounce(() => props.fetchSessions(), 500); + }, []) const onAddFilter = (filter: any) => { props.addFilter(filter); @@ -33,10 +41,12 @@ function SessionSearch(props: Props) { } }); - props.edit({ + props.updateFilter({ ...appliedFilter, filters: newFilters, }); + + debounceFetch() }; const onRemoveFilter = (filterIndex: any) => { @@ -44,15 +54,19 @@ function SessionSearch(props: Props) { return i !== filterIndex; }); - props.edit({ + props.updateFilter({ filters: newFilters, }); + + debounceFetch() }; const onChangeEventsOrder = (e: any, { value }: any) => { - props.edit({ + props.updateFilter({ eventsOrder: value, }); + + debounceFetch() }; return !metaLoading && ( @@ -102,5 +116,5 @@ export default connect( appliedFilter: state.getIn(['search', 'instance']), metaLoading: state.getIn(['customFields', 'fetchRequestActive', 'loading']) }), - { edit, addFilter } + { edit, addFilter, fetchSessions, updateFilter } )(SessionSearch); From 1518fbf594b125bd0a91a115071a078de6801c10 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 17 Feb 2023 15:24:33 +0100 Subject: [PATCH 026/218] change(ui) - show percentage based on trigger option --- .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index 78f2aa24f..acef2a71c 100644 --- 
a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -131,7 +131,7 @@ function AlertListItem(props: Props) { {' is '} {alert.query.operator} - {numberWithCommas(alert.query.right)} {alert.metric?.unit} + {numberWithCommas(alert.query.right)} {alert.change === 'percent' ? '%' : alert.metric?.unit} {' over the past '} {getThreshold( From 2953282b21fbb11687dffaa07fb3305ba57ab91f Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 17 Feb 2023 15:29:07 +0100 Subject: [PATCH 027/218] change(ui) - unit space --- .../components/Dashboard/components/Alerts/AlertListItem.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index acef2a71c..024cc734c 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -131,7 +131,7 @@ function AlertListItem(props: Props) { {' is '} {alert.query.operator} - {numberWithCommas(alert.query.right)} {alert.change === 'percent' ? '%' : alert.metric?.unit} + {numberWithCommas(alert.query.right)}{alert.change === 'percent' ? '%' : alert.metric?.unit} {' over the past '} {getThreshold( From 6302ff4df4df02600534fcf68b6e86093373ce87 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 10:35:57 +0100 Subject: [PATCH 028/218] fix(ui) - search filters update --- .../Filters/FilterSource/FilterSource.tsx | 6 ---- .../shared/SessionSearch/SessionSearch.tsx | 28 ++++++++++++++----- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index eed1e6e1d..08c93d8df 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -1,7 +1,6 @@ import { FilterType } from 'App/types/filter/filterType'; import React, { useState, useEffect } from 'react'; import stl from './FilterSource.module.css'; -import { debounce } from 'App/utils'; import cn from 'classnames'; interface Props { @@ -11,16 +10,11 @@ interface Props { function FilterSource(props: Props) { const { filter } = props; const [value, setValue] = useState(filter.source[0] || ''); - const debounceUpdate: any = React.useCallback(debounce(props.onUpdate, 1000), [props.onUpdate]); useEffect(() => { setValue(filter.source[0] || ''); }, [filter]); - useEffect(() => { - debounceUpdate({ ...filter, source: [value] }); - }, [value]); - const write = ({ target: { value, name } }: any) => setValue(value); const renderFiled = () => { diff --git a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx index 48856d929..84fb770a8 100644 --- a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx +++ b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx @@ -1,24 +1,32 @@ -import React from 'react'; +import React, { useEffect } from 'react'; import FilterList from 'Shared/Filters/FilterList'; import FilterSelection from 'Shared/Filters/FilterSelection'; import SaveFilterButton from 'Shared/SaveFilterButton'; import { connect } from 'react-redux'; import { Button } from 'UI'; -import { edit, addFilter } from 'Duck/search'; +import { edit, 
addFilter, fetchSessions, updateFilter } from 'Duck/search'; import SessionSearchQueryParamHandler from 'Shared/SessionSearchQueryParamHandler'; +import { debounce } from 'App/utils'; + +let debounceFetch: any = () => {} + interface Props { appliedFilter: any; edit: typeof edit; addFilter: typeof addFilter; saveRequestPayloads: boolean; metaLoading?: boolean + fetchSessions: typeof fetchSessions; + updateFilter: typeof updateFilter; } function SessionSearch(props: Props) { const { appliedFilter, saveRequestPayloads = false, metaLoading } = props; const hasEvents = appliedFilter.filters.filter((i: any) => i.isEvent).size > 0; const hasFilters = appliedFilter.filters.filter((i: any) => !i.isEvent).size > 0; - + useEffect(() => { + debounceFetch = debounce(() => props.fetchSessions(), 500); + }, []) const onAddFilter = (filter: any) => { props.addFilter(filter); @@ -33,10 +41,12 @@ function SessionSearch(props: Props) { } }); - props.edit({ + props.updateFilter({ ...appliedFilter, filters: newFilters, }); + + debounceFetch() }; const onRemoveFilter = (filterIndex: any) => { @@ -44,15 +54,19 @@ function SessionSearch(props: Props) { return i !== filterIndex; }); - props.edit({ + props.updateFilter({ filters: newFilters, }); + + debounceFetch() }; const onChangeEventsOrder = (e: any, { value }: any) => { - props.edit({ + props.updateFilter({ eventsOrder: value, }); + + debounceFetch() }; return !metaLoading && ( @@ -102,5 +116,5 @@ export default connect( appliedFilter: state.getIn(['search', 'instance']), metaLoading: state.getIn(['customFields', 'fetchRequestActive', 'loading']) }), - { edit, addFilter } + { edit, addFilter, fetchSessions, updateFilter } )(SessionSearch); From 76f971237f899879714e7b1c63ddf692a6d46ba2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Feb 2023 11:09:50 +0100 Subject: [PATCH 029/218] feat(alerts): fixed no join constraint --- ee/api/chalicelib/core/alerts_processor.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py index 06663336c..17e4d275f 100644 --- a/ee/api/chalicelib/core/alerts_processor.py +++ b/ee/api/chalicelib/core/alerts_processor.py @@ -149,8 +149,7 @@ def Build(a): "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000, "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000} else: - sub1 = f"""{subQ} AND timestamp>=%(startDate)s - AND timestamp<=%(now)s + sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""} {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}""" params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000 sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""} From 1b675b8e400858261aad81840b9e7f3074eca233 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 11:24:57 +0100 Subject: [PATCH 030/218] fix(ui) - search filters update --- frontend/app/utils/search.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 82b3daee1..017a5a7f6 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -80,7 +80,7 @@ const getFiltersFromEntries = (entires: any) => { filter.value = valueArr; filter.operator = operator; - filter.source = sourceArr; + filter.source = sourceArr && sourceArr.length > 0 ? 
sourceArr : null; filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; if (!filter.filters || filter.filters.size === 0) { filters.push(filter); From 9a8d43a323968b52ceea2a48e72d5f993a8e2c29 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 11:24:57 +0100 Subject: [PATCH 031/218] fix(ui) - search filters update --- frontend/app/utils/search.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 82b3daee1..017a5a7f6 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -80,7 +80,7 @@ const getFiltersFromEntries = (entires: any) => { filter.value = valueArr; filter.operator = operator; - filter.source = sourceArr; + filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; if (!filter.filters || filter.filters.size === 0) { filters.push(filter); From d7c2052ac0da038dcb3afabb9c8e59ca95208de5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Feb 2023 14:43:01 +0100 Subject: [PATCH 032/218] feat(DB): changes --- ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 2 +- scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 68b115e46..c6060201a 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -326,7 +326,7 @@ $$ LANGUAGE plpgsql; --- To migrate saved metric_series data +-- To migrate metric_series data DO $$ DECLARE diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index b9f0380c4..a490d0943 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -300,7 +300,7 @@ $$ LANGUAGE plpgsql; --- To migrate saved metric_series data +-- To migrate metric_series data DO $$ DECLARE From 68c9f30200bef762462b839b93911df3ae5a6c3a Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 20 Feb 2023 15:30:45 +0100 Subject: [PATCH 033/218] fix(ui): keep share message after sharing --- frontend/app/components/shared/SharePopup/SharePopup.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/shared/SharePopup/SharePopup.js b/frontend/app/components/shared/SharePopup/SharePopup.js index 984ce0060..07726d14a 100644 --- a/frontend/app/components/shared/SharePopup/SharePopup.js +++ b/frontend/app/components/shared/SharePopup/SharePopup.js @@ -80,8 +80,8 @@ export default class SharePopup extends React.PureComponent { handleSuccess = (endpoint) => { const obj = endpoint === 'Slack' - ? { isOpen: false, comment: '', loadingSlack: false } - : { isOpen: false, comment: '', loadingTeams: false }; + ? { loadingSlack: false } + : { loadingTeams: false }; this.setState(obj); toast.success(`Sent to ${endpoint}.`); }; @@ -109,7 +109,7 @@ export default class SharePopup extends React.PureComponent { return ( this.setState({ isOpen: true })} - onClose={() => this.setState({ isOpen: false })} + onClose={() => this.setState({ isOpen: false, comment: '' })} render={() => (
{this.state.loadingTeams || this.state.loadingSlack ? ( From 2fae4549d665ff5d04642b2f8e24e70d57baa7a2 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 16:37:19 +0100 Subject: [PATCH 034/218] fix(ui) - cards list filter by dashboard --- .../components/Dashboard/components/MetricsList/MetricsList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx index f2639d37f..1b9f7dfc9 100644 --- a/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx +++ b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx @@ -21,7 +21,7 @@ function MetricsList({ const dashboard = dashboardStore.selectedDashboard; const existingCardIds = useMemo(() => dashboard?.widgets?.map(i => parseInt(i.metricId)), [dashboard]); - const cards = useMemo(() => metricStore.filteredCards.filter(i => !existingCardIds?.includes(parseInt(i.metricId))), [metricStore.filteredCards]); + const cards = useMemo(() => !!onSelectionChange ? metricStore.filteredCards.filter(i => !existingCardIds?.includes(parseInt(i.metricId))) : metricStore.filteredCards, [metricStore.filteredCards]); useEffect(() => { metricStore.fetchList(); From fdd28dbc4ae9e9b59a591afad5577679b3113e57 Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 20 Feb 2023 16:37:55 +0100 Subject: [PATCH 035/218] Draft: New metrics module (#982) * feat(backend): created new metrics module --- backend/cmd/assets/main.go | 19 +- backend/cmd/db/main.go | 13 +- backend/cmd/ender/main.go | 17 +- backend/cmd/http/main.go | 23 ++- backend/cmd/integrations/main.go | 18 +- backend/cmd/sink/main.go | 36 ++-- backend/cmd/storage/main.go | 15 +- backend/internal/assets/cacher/cacher.go | 47 ++--- backend/internal/http/router/handlers-ios.go | 37 ++-- backend/internal/http/router/handlers-web.go | 65 +++---- backend/internal/http/router/handlers.go | 6 +- backend/internal/http/router/response.go | 33 +++- backend/internal/http/router/router.go | 44 +---- backend/internal/sessionender/ender.go | 45 ++--- backend/internal/sink/assetscache/assets.go | 82 +++----- backend/internal/storage/storage.go | 143 ++++---------- backend/pkg/db/postgres/batches.go | 65 ++----- backend/pkg/db/postgres/bulk.go | 55 ++---- backend/pkg/db/postgres/bulks.go | 33 ++-- backend/pkg/db/postgres/connector.go | 49 +---- backend/pkg/db/postgres/pool.go | 73 +++----- backend/pkg/messages/iterator-sink.go | 3 + backend/pkg/messages/iterator.go | 5 +- backend/pkg/metrics/assets/metrics.go | 72 ++++++++ backend/pkg/metrics/common/metrics.go | 11 ++ backend/pkg/metrics/database/metrics.go | 127 +++++++++++++ backend/pkg/metrics/ender/metrics.go | 51 +++++ backend/pkg/metrics/http/metrics.go | 55 ++++++ backend/pkg/metrics/server.go | 40 ++++ backend/pkg/metrics/sink/metrics.go | 185 +++++++++++++++++++ backend/pkg/metrics/storage/metrics.go | 114 ++++++++++++ ee/backend/pkg/db/clickhouse/bulk.go | 16 +- ee/backend/pkg/db/clickhouse/connector.go | 26 +-- 33 files changed, 1021 insertions(+), 602 deletions(-) create mode 100644 backend/pkg/metrics/assets/metrics.go create mode 100644 backend/pkg/metrics/common/metrics.go create mode 100644 backend/pkg/metrics/database/metrics.go create mode 100644 backend/pkg/metrics/ender/metrics.go create mode 100644 backend/pkg/metrics/http/metrics.go create mode 100644 backend/pkg/metrics/server.go create mode 100644 backend/pkg/metrics/sink/metrics.go create mode 100644 
backend/pkg/metrics/storage/metrics.go diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index b41dedd87..b05ecbe52 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -1,9 +1,7 @@ package main import ( - "context" "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -13,12 +11,16 @@ import ( "openreplay/backend/internal/assets/cacher" config "openreplay/backend/internal/config/assets" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + assetsMetrics "openreplay/backend/pkg/metrics/assets" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("assets") + m := metrics.New() + m.Register(assetsMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -26,18 +28,13 @@ func main() { pprof.StartProfilingServer() } - cacher := cacher.NewCacher(cfg, metrics) - - totalAssets, err := metrics.RegisterCounter("assets_total") - if err != nil { - log.Printf("can't create assets_total metric: %s", err) - } + cacher := cacher.NewCacher(cfg) msgHandler := func(msg messages.Message) { switch m := msg.(type) { case *messages.AssetCache: cacher.CacheURL(m.SessionID(), m.URL) - totalAssets.Add(context.Background(), 1) + assetsMetrics.IncreaseProcessesSessions() // TODO: connect to "raw" topic in order to listen for JSException case *messages.JSException: sourceList, err := assets.ExtractJSExceptionSources(&m.Payload) diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index f9440a908..84b0d81ed 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -3,8 +3,6 @@ package main import ( "errors" "log" - types2 "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -14,16 +12,21 @@ import ( "openreplay/backend/internal/db/datasaver" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" + types2 "openreplay/backend/pkg/db/types" "openreplay/backend/pkg/handlers" custom2 "openreplay/backend/pkg/handlers/custom" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/sessions" ) func main() { - metrics := monitoring.New("db") + m := metrics.New() + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := db.New() @@ -33,7 +36,7 @@ func main() { // Init database pg := cache.NewPGCache( - postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit, metrics), cfg.ProjectExpirationTimeoutMs) + postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit), cfg.ProjectExpirationTimeoutMs) defer pg.Close() // HandlersFabric returns the list of message handlers we want to be applied to each incoming message. 
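[Note on the new metrics module] The wiring in this patch replaces the OpenTelemetry-based monitoring package with a new pkg/metrics module: each service now calls metrics.New(), registers its own collector list (m.Register(databaseMetrics.List()), m.Register(sinkMetrics.List()), and so on), and records through package-level helpers such as sinkMetrics.IncreaseTotalMessages() or httpMetrics.RecordRequestDuration(...). The newly created backend/pkg/metrics/* files are not shown in this diff, so the following is a minimal sketch of that calling convention only, assuming a Prometheus-backed implementation via prometheus/client_golang; the "sink" namespace, the bucket choices, and the :8888 scrape address are illustrative assumptions, not the actual OpenReplay code.

    package main

    import (
        "log"
        "net/http"

        "github.com/prometheus/client_golang/prometheus"
        "github.com/prometheus/client_golang/prometheus/promhttp"
    )

    // Package-level instruments, mirroring what a file such as
    // pkg/metrics/sink/metrics.go plausibly declares.
    var (
        totalMessages = prometheus.NewCounter(prometheus.CounterOpts{
            Namespace: "sink",
            Name:      "messages_total",
            Help:      "Total number of messages processed by the sink.",
        })
        messageSize = prometheus.NewHistogram(prometheus.HistogramOpts{
            Namespace: "sink",
            Name:      "messages_size",
            Help:      "Size in bytes of processed messages.",
            // Assumed buckets: 64B up to ~1MB in powers of four.
            Buckets: prometheus.ExponentialBuckets(64, 4, 8),
        })
    )

    // List plays the role of sinkMetrics.List() in the diff: everything
    // the service wants registered, returned as one slice.
    func List() []prometheus.Collector {
        return []prometheus.Collector{totalMessages, messageSize}
    }

    // Helpers matching the call sites introduced by this patch.
    func IncreaseTotalMessages()         { totalMessages.Inc() }
    func RecordMessageSize(size float64) { messageSize.Observe(size) }

    func main() {
        // metrics.New() plus m.Register(...) likely reduce to this pair.
        reg := prometheus.NewRegistry()
        reg.MustRegister(List()...)

        IncreaseTotalMessages()
        RecordMessageSize(1024)

        // pkg/metrics/server.go presumably exposes a scrape endpoint.
        http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{}))
        log.Fatal(http.ListenAndServe(":8888", nil))
    }

Under that assumption, the design gain visible in the rest of the patch is that package-level helpers replace instrument fields (syncfloat64.Counter and friends) threaded through constructors, which is why call sites like cacher.go, sessionender, and the HTTP router can drop the *monitoring.Metrics parameter from their signatures.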
diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index 74b0b8bd2..da7ca9b89 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -2,8 +2,6 @@ package main import ( "log" - "openreplay/backend/internal/storage" - "openreplay/backend/pkg/pprof" "os" "os/signal" "strings" @@ -12,16 +10,23 @@ import ( "openreplay/backend/internal/config/ender" "openreplay/backend/internal/sessionender" + "openreplay/backend/internal/storage" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + enderMetrics "openreplay/backend/pkg/metrics/ender" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("ender") + m := metrics.New() + m.Register(enderMetrics.List()) + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := ender.New() @@ -29,10 +34,10 @@ func main() { pprof.StartProfilingServer() } - pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics), cfg.ProjectExpirationTimeoutMs) + pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), cfg.ProjectExpirationTimeoutMs) defer pg.Close() - sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber) + sessions, err := sessionender.New(intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber) if err != nil { log.Printf("can't init ender service: %s", err) return diff --git a/backend/cmd/http/main.go b/backend/cmd/http/main.go index 4fb82b635..83eedaf29 100644 --- a/backend/cmd/http/main.go +++ b/backend/cmd/http/main.go @@ -2,23 +2,28 @@ package main import ( "log" - "openreplay/backend/internal/config/http" - "openreplay/backend/internal/http/router" - "openreplay/backend/internal/http/server" - "openreplay/backend/internal/http/services" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" + "openreplay/backend/internal/config/http" + "openreplay/backend/internal/http/router" + "openreplay/backend/internal/http/server" + "openreplay/backend/internal/http/services" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + httpMetrics "openreplay/backend/pkg/metrics/http" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("http") + m := metrics.New() + m.Register(httpMetrics.List()) + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := http.New() @@ -31,14 +36,14 @@ func main() { defer producer.Close(15000) // Connect to database - dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics), 1000*60*20) + dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), 1000*60*20) defer dbConn.Close() // Build all services services := services.New(cfg, producer, dbConn) // Init server's routes - router, err := router.NewRouter(cfg, services, metrics) + router, err := router.NewRouter(cfg, services) if err != nil { log.Fatalf("failed while creating engine: %s", err) } diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 8c6d56966..3fa07ee9c 100644 --- a/backend/cmd/integrations/main.go +++ 
b/backend/cmd/integrations/main.go @@ -2,24 +2,26 @@ package main import ( "log" - config "openreplay/backend/internal/config/integrations" - "openreplay/backend/internal/integrations/clientManager" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/pprof" - "time" - "os" "os/signal" "syscall" + "time" + config "openreplay/backend/internal/config/integrations" + "openreplay/backend/internal/integrations/clientManager" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/token" ) func main() { - metrics := monitoring.New("integrations") + m := metrics.New() + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -27,7 +29,7 @@ func main() { pprof.StartProfilingServer() } - pg := postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics) + pg := postgres.NewConn(cfg.Postgres.String(), 0, 0) defer pg.Close() tokenizer := token.NewTokenizer(cfg.TokenSecret) diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 74e0b1db1..4bbaeeee4 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -2,10 +2,8 @@ package main import ( "bytes" - "context" "encoding/binary" "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -16,13 +14,16 @@ import ( "openreplay/backend/internal/sink/sessionwriter" "openreplay/backend/internal/storage" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + sinkMetrics "openreplay/backend/pkg/metrics/sink" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" ) func main() { - metrics := monitoring.New("sink") + m := metrics.New() + m.Register(sinkMetrics.List()) log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := sink.New() @@ -39,22 +40,8 @@ func main() { producer := queue.NewProducer(cfg.MessageSizeLimit, true) defer producer.Close(cfg.ProducerCloseTimeout) rewriter := assets.NewRewriter(cfg.AssetsOrigin) - assetMessageHandler := assetscache.New(cfg, rewriter, producer, metrics) - + assetMessageHandler := assetscache.New(cfg, rewriter, producer) counter := storage.NewLogCounter() - // Session message metrics - totalMessages, err := metrics.RegisterCounter("messages_total") - if err != nil { - log.Printf("can't create messages_total metric: %s", err) - } - savedMessages, err := metrics.RegisterCounter("messages_saved") - if err != nil { - log.Printf("can't create messages_saved metric: %s", err) - } - messageSize, err := metrics.RegisterHistogram("messages_size") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } var ( sessionID uint64 @@ -74,11 +61,12 @@ func main() { if domBuffer.Len() <= 0 && devBuffer.Len() <= 0 { return } + sinkMetrics.RecordWrittenBytes(float64(domBuffer.Len()), "dom") + sinkMetrics.RecordWrittenBytes(float64(devBuffer.Len()), "devtools") // Write buffered batches to the session if err := writer.Write(sessionID, domBuffer.Bytes(), devBuffer.Bytes()); err != nil { log.Printf("writer error: %s", err) - return } // Prepare buffer for the next batch @@ -88,8 +76,7 @@ func main() { return } - // [METRICS] Increase the number of processed messages - totalMessages.Add(context.Background(), 1) + sinkMetrics.IncreaseTotalMessages() // Send SessionEnd trigger to storage service if msg.TypeID() == 
messages.MsgSessionEnd { @@ -187,9 +174,8 @@ func main() { } } - // [METRICS] Increase the number of written to the files messages and the message size - messageSize.Record(context.Background(), float64(len(msg.Encode()))) - savedMessages.Add(context.Background(), 1) + sinkMetrics.IncreaseWrittenMessages() + sinkMetrics.RecordMessageSize(float64(len(msg.Encode()))) } consumer := queue.NewConsumer( diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index dcb1b53ed..472324b95 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -2,7 +2,6 @@ package main import ( "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -12,13 +11,17 @@ import ( "openreplay/backend/internal/storage" "openreplay/backend/pkg/failover" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + storageMetrics "openreplay/backend/pkg/metrics/storage" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" - s3storage "openreplay/backend/pkg/storage" + cloud "openreplay/backend/pkg/storage" ) func main() { - metrics := monitoring.New("storage") + m := metrics.New() + m.Register(storageMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -26,8 +29,8 @@ func main() { pprof.StartProfilingServer() } - s3 := s3storage.NewS3(cfg.S3Region, cfg.S3Bucket) - srv, err := storage.New(cfg, s3, metrics) + s3 := cloud.NewS3(cfg.S3Region, cfg.S3Bucket) + srv, err := storage.New(cfg, s3) if err != nil { log.Printf("can't init storage service: %s", err) return diff --git a/backend/internal/assets/cacher/cacher.go b/backend/internal/assets/cacher/cacher.go index 8bbee092f..4b0353a9a 100644 --- a/backend/internal/assets/cacher/cacher.go +++ b/backend/internal/assets/cacher/cacher.go @@ -1,16 +1,13 @@ package cacher import ( - "context" "crypto/tls" "fmt" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "io" "io/ioutil" - "log" "mime" "net/http" - "openreplay/backend/pkg/monitoring" + metrics "openreplay/backend/pkg/metrics/assets" "path/filepath" "strings" "time" @@ -25,30 +22,22 @@ import ( const MAX_CACHE_DEPTH = 5 type cacher struct { - timeoutMap *timeoutMap // Concurrency implemented - s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." - httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." - rewriter *assets.Rewriter // Read only - Errors chan error - sizeLimit int - downloadedAssets syncfloat64.Counter - requestHeaders map[string]string - workers *WorkerPool + timeoutMap *timeoutMap // Concurrency implemented + s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." + httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." 
+ rewriter *assets.Rewriter // Read only + Errors chan error + sizeLimit int + requestHeaders map[string]string + workers *WorkerPool } func (c *cacher) CanCache() bool { return c.workers.CanAddTask() } -func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher { +func NewCacher(cfg *config.Config) *cacher { rewriter := assets.NewRewriter(cfg.AssetsOrigin) - if metrics == nil { - log.Fatalf("metrics are empty") - } - downloadedAssets, err := metrics.RegisterCounter("assets_downloaded") - if err != nil { - log.Printf("can't create downloaded_assets metric: %s", err) - } c := &cacher{ timeoutMap: newTimeoutMap(), s3: storage.NewS3(cfg.AWSRegion, cfg.S3BucketAssets), @@ -59,11 +48,10 @@ func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher { TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, }, }, - rewriter: rewriter, - Errors: make(chan error), - sizeLimit: cfg.AssetsSizeLimit, - downloadedAssets: downloadedAssets, - requestHeaders: cfg.AssetsRequestHeaders, + rewriter: rewriter, + Errors: make(chan error), + sizeLimit: cfg.AssetsSizeLimit, + requestHeaders: cfg.AssetsRequestHeaders, } c.workers = NewPool(64, c.CacheFile) return c @@ -75,6 +63,7 @@ func (c *cacher) CacheFile(task *Task) { func (c *cacher) cacheURL(t *Task) { t.retries-- + start := time.Now() req, _ := http.NewRequest("GET", t.requestURL, nil) if t.retries%2 == 0 { req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0") @@ -87,6 +76,7 @@ func (c *cacher) cacheURL(t *Task) { c.Errors <- errors.Wrap(err, t.urlContext) return } + metrics.RecordDownloadDuration(float64(time.Now().Sub(start).Milliseconds()), res.StatusCode) defer res.Body.Close() if res.StatusCode >= 400 { printErr := true @@ -122,12 +112,15 @@ func (c *cacher) cacheURL(t *Task) { } // TODO: implement in streams + start = time.Now() err = c.s3.Upload(strings.NewReader(strData), t.cachePath, contentType, false) if err != nil { + metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), true) c.Errors <- errors.Wrap(err, t.urlContext) return } - c.downloadedAssets.Add(context.Background(), 1) + metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), false) + metrics.IncreaseSavedSessions() if isCSS { if t.depth > 0 { diff --git a/backend/internal/http/router/handlers-ios.go b/backend/internal/http/router/handlers-ios.go index e0fc73b6f..b11918d54 100644 --- a/backend/internal/http/router/handlers-ios.go +++ b/backend/internal/http/router/handlers-ios.go @@ -22,28 +22,28 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) req := &StartIOSSessionRequest{} if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, 0) return } body := http.MaxBytesReader(w, r.Body, e.cfg.JsonSizeLimit) defer body.Close() if err := json.NewDecoder(body).Decode(req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, 0) return } if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"), startTime, r.URL.Path, 0) return } p, err := e.services.Database.GetProjectByKey(*req.ProjectKey) if err != nil { if postgres.IsNoRowsErr(err) { - ResponseWithError(w, http.StatusNotFound, 
errors.New("Project doesn't exist or is not active")) + ResponseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"), startTime, r.URL.Path, 0) } else { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) // TODO: send error here only on staging } return } @@ -53,18 +53,18 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) if err != nil { // Starting the new one dice := byte(rand.Intn(100)) // [0, 100) if dice >= p.SampleRate { - ResponseWithError(w, http.StatusForbidden, errors.New("cancel")) + ResponseWithError(w, http.StatusForbidden, errors.New("cancel"), startTime, r.URL.Path, 0) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, 0) return } sessionID, err := e.services.Flaker.Compose(uint64(startTime.UnixMilli())) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) return } // TODO: if EXPIRED => send message for two sessions association @@ -94,22 +94,24 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) UserUUID: userUUID, SessionID: strconv.FormatUint(tokenData.ID, 10), BeaconSizeLimit: e.cfg.BeaconSizeLimit, - }) + }, startTime, r.URL.Path, 0) } func (e *Router) pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } e.pushMessages(w, r, sessionData.ID, e.cfg.TopicRawIOS) } func (e *Router) pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil && err != token.EXPIRED { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } // Check timestamps here? @@ -117,16 +119,17 @@ func (e *Router) pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Reque } func (e *Router) imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() log.Printf("recieved imagerequest") sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { // Should accept expired token? 
- ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, 0) return } r.Body = http.MaxBytesReader(w, r.Body, e.cfg.FileSizeLimit) @@ -134,21 +137,21 @@ func (e *Router) imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) err = r.ParseMultipartForm(1e6) // ~1Mb if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { - ResponseWithError(w, http.StatusUnsupportedMediaType, err) + ResponseWithError(w, http.StatusUnsupportedMediaType, err, startTime, r.URL.Path, 0) return // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB } else if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) // TODO: send error here only on staging return } if r.MultipartForm == nil { - ResponseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed")) + ResponseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed"), startTime, r.URL.Path, 0) return } if len(r.MultipartForm.Value["projectKey"]) == 0 { - ResponseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter? + ResponseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing"), startTime, r.URL.Path, 0) // status for missing/wrong parameter? return } diff --git a/backend/internal/http/router/handlers-web.go b/backend/internal/http/router/handlers-web.go index 7afd184e5..52a37b7f0 100644 --- a/backend/internal/http/router/handlers-web.go +++ b/backend/internal/http/router/handlers-web.go @@ -3,18 +3,17 @@ package router import ( "encoding/json" "errors" - "github.com/Masterminds/semver" - "go.opentelemetry.io/otel/attribute" "io" "log" "math/rand" "net/http" - "openreplay/backend/internal/http/uuid" - "openreplay/backend/pkg/flakeid" "strconv" "time" + "github.com/Masterminds/semver" + "openreplay/backend/internal/http/uuid" "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/flakeid" . 
"openreplay/backend/pkg/messages" "openreplay/backend/pkg/token" ) @@ -28,13 +27,6 @@ func (e *Router) readBody(w http.ResponseWriter, r *http.Request, limit int64) ( if err != nil { return nil, err } - - reqSize := len(bodyBytes) - e.requestSize.Record( - r.Context(), - float64(reqSize), - []attribute.KeyValue{attribute.String("method", r.URL.Path)}..., - ) return bodyBytes, nil } @@ -56,40 +48,43 @@ func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) (ts uint func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { startTime := time.Now() + bodySize := 0 // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Parse request body req := &StartSessionRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return } // Handler's logic if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"), startTime, r.URL.Path, bodySize) return } p, err := e.services.Database.GetProjectByKey(*req.ProjectKey) if err != nil { if postgres.IsNoRowsErr(err) { - ResponseWithError(w, http.StatusNotFound, errors.New("project doesn't exist or capture limit has been reached")) + ResponseWithError(w, http.StatusNotFound, + errors.New("project doesn't exist or capture limit has been reached"), startTime, r.URL.Path, bodySize) } else { log.Printf("can't get project by key: %s", err) - ResponseWithError(w, http.StatusInternalServerError, errors.New("can't get project by key")) + ResponseWithError(w, http.StatusInternalServerError, errors.New("can't get project by key"), startTime, r.URL.Path, bodySize) } return } @@ -99,19 +94,19 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) if err != nil || req.Reset { // Starting the new one dice := byte(rand.Intn(100)) // [0, 100) if dice >= p.SampleRate { - ResponseWithError(w, http.StatusForbidden, errors.New("cancel")) + ResponseWithError(w, http.StatusForbidden, errors.New("cancel"), startTime, r.URL.Path, bodySize) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, bodySize) return } startTimeMili := startTime.UnixMilli() sessionID, err := e.services.Flaker.Compose(uint64(startTimeMili)) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return } // TODO: if EXPIRED => send message for two sessions association @@ -163,29 +158,33 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) BeaconSizeLimit: e.getBeaconSize(tokenData.ID), StartTimestamp: 
int64(flakeid.ExtractTimestamp(tokenData.ID)), Delay: tokenData.Delay, - }) + }, startTime, r.URL.Path, bodySize) } func (e *Router) pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + // Check authorization sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, bodySize) return } // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.getBeaconSize(sessionData.ID)) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Send processed messages to queue as array of bytes // TODO: check bytes for nonsense crap @@ -194,39 +193,43 @@ func (e *Router) pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) log.Printf("can't send processed messages to queue: %s", err) } - w.WriteHeader(http.StatusOK) + ResponseOK(w, startTime, r.URL.Path, bodySize) } func (e *Router) notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Parse request body req := &NotStartedRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return } // Handler's logic if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("projectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("projectKey value required"), startTime, r.URL.Path, bodySize) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, bodySize) return } country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r) @@ -248,5 +251,5 @@ func (e *Router) notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { log.Printf("Unable to insert Unstarted Session: %v\n", err) } - w.WriteHeader(http.StatusOK) + ResponseOK(w, startTime, r.URL.Path, bodySize) } diff --git a/backend/internal/http/router/handlers.go b/backend/internal/http/router/handlers.go index c36fdd668..425177341 100644 --- a/backend/internal/http/router/handlers.go +++ b/backend/internal/http/router/handlers.go @@ -6,9 +6,11 @@ import ( "io/ioutil" "log" "net/http" + "time" ) func (e *Router) pushMessages(w 
http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) { + start := time.Now() body := http.MaxBytesReader(w, r.Body, e.cfg.BeaconSizeLimit) defer body.Close() @@ -21,7 +23,7 @@ func (e *Router) pushMessages(w http.ResponseWriter, r *http.Request, sessionID reader, err = gzip.NewReader(body) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent response + ResponseWithError(w, http.StatusInternalServerError, err, start, r.URL.Path, 0) // TODO: stage-dependent response return } //log.Println("Gzip reader init", reader) @@ -32,7 +34,7 @@ func (e *Router) pushMessages(w http.ResponseWriter, r *http.Request, sessionID //log.Println("Reader after switch:", reader) buf, err := ioutil.ReadAll(reader) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err, start, r.URL.Path, 0) // TODO: send error here only on staging return } e.services.Producer.Produce(topicName, sessionID, buf) // What if not able to send? diff --git a/backend/internal/http/router/response.go b/backend/internal/http/router/response.go index 0b4725419..b66b7c563 100644 --- a/backend/internal/http/router/response.go +++ b/backend/internal/http/router/response.go @@ -4,21 +4,45 @@ import ( "encoding/json" "log" "net/http" + "time" + + metrics "openreplay/backend/pkg/metrics/http" ) -func ResponseWithJSON(w http.ResponseWriter, res interface{}) { +func recordMetrics(requestStart time.Time, url string, code, bodySize int) { + if bodySize > 0 { + metrics.RecordRequestSize(float64(bodySize), url, code) + } + metrics.IncreaseTotalRequests() + metrics.RecordRequestDuration(float64(time.Now().Sub(requestStart).Milliseconds()), url, code) +} + +func ResponseOK(w http.ResponseWriter, requestStart time.Time, url string, bodySize int) { + w.WriteHeader(http.StatusOK) + recordMetrics(requestStart, url, http.StatusOK, bodySize) +} + +func ResponseWithJSON(w http.ResponseWriter, res interface{}, requestStart time.Time, url string, bodySize int) { body, err := json.Marshal(res) if err != nil { log.Println(err) } w.Header().Set("Content-Type", "application/json") w.Write(body) + recordMetrics(requestStart, url, http.StatusOK, bodySize) } -func ResponseWithError(w http.ResponseWriter, code int, err error) { - type response struct { - Error string `json:"error"` +type response struct { + Error string `json:"error"` +} + +func ResponseWithError(w http.ResponseWriter, code int, err error, requestStart time.Time, url string, bodySize int) { + body, err := json.Marshal(&response{err.Error()}) + if err != nil { + log.Println(err) } + w.Header().Set("Content-Type", "application/json") w.WriteHeader(code) - ResponseWithJSON(w, &response{err.Error()}) + w.Write(body) + recordMetrics(requestStart, url, code, bodySize) } diff --git a/backend/internal/http/router/router.go b/backend/internal/http/router/router.go index 964016dfd..6cd7efe79 100644 --- a/backend/internal/http/router/router.go +++ b/backend/internal/http/router/router.go @@ -1,19 +1,16 @@ package router import ( - "context" "fmt" - "github.com/gorilla/mux" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" "net/http" + "sync" + "time" + + "github.com/gorilla/mux" http3 "openreplay/backend/internal/config/http" http2 "openreplay/backend/internal/http/services" "openreplay/backend/internal/http/util" - "openreplay/backend/pkg/monitoring" - "sync" - "time" ) type BeaconSize struct { @@ -25,21 +22,16 @@ type Router
struct { router *mux.Router cfg *http3.Config services *http2.ServicesBuilder - requestSize syncfloat64.Histogram - requestDuration syncfloat64.Histogram - totalRequests syncfloat64.Counter mutex *sync.RWMutex beaconSizeCache map[uint64]*BeaconSize // Cache for session's beaconSize } -func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder, metrics *monitoring.Metrics) (*Router, error) { +func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder) (*Router, error) { switch { case cfg == nil: return nil, fmt.Errorf("config is empty") case services == nil: return nil, fmt.Errorf("services is empty") - case metrics == nil: - return nil, fmt.Errorf("metrics is empty") } e := &Router{ cfg: cfg, @@ -47,7 +39,6 @@ func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder, metrics *moni mutex: &sync.RWMutex{}, beaconSizeCache: make(map[uint64]*BeaconSize), } - e.initMetrics(metrics) e.init() go e.clearBeaconSizes() return e, nil @@ -115,22 +106,6 @@ func (e *Router) init() { e.router.Use(e.corsMiddleware) } -func (e *Router) initMetrics(metrics *monitoring.Metrics) { - var err error - e.requestSize, err = metrics.RegisterHistogram("requests_body_size") - if err != nil { - log.Printf("can't create requests_body_size metric: %s", err) - } - e.requestDuration, err = metrics.RegisterHistogram("requests_duration") - if err != nil { - log.Printf("can't create requests_duration metric: %s", err) - } - e.totalRequests, err = metrics.RegisterCounter("requests_total") - if err != nil { - log.Printf("can't create requests_total metric: %s", err) - } -} - func (e *Router) root(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } @@ -149,17 +124,8 @@ func (e *Router) corsMiddleware(next http.Handler) http.Handler { log.Printf("Request: %v - %v ", r.Method, util.SafeString(r.URL.Path)) - requestStart := time.Now() - // Serve request next.ServeHTTP(w, r) - - metricsContext, _ := context.WithTimeout(context.Background(), time.Millisecond*100) - e.totalRequests.Add(metricsContext, 1) - e.requestDuration.Record(metricsContext, - float64(time.Now().Sub(requestStart).Milliseconds()), - []attribute.KeyValue{attribute.String("method", r.URL.Path)}..., - ) }) } diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index c1c2c9b7f..e1ddb0ffe 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -1,13 +1,11 @@ package sessionender import ( - "context" - "fmt" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" "time" + + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/metrics/ender" ) // EndedSessionHandler handler for ended sessions @@ -23,32 +21,16 @@ type session struct { // SessionEnder updates timestamp of last message for each session type SessionEnder struct { - timeout int64 - sessions map[uint64]*session // map[sessionID]session - timeCtrl *timeController - activeSessions syncfloat64.UpDownCounter - totalSessions syncfloat64.Counter + timeout int64 + sessions map[uint64]*session // map[sessionID]session + timeCtrl *timeController } -func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder, error) { - if metrics == nil { - return nil, fmt.Errorf("metrics module is empty") - } - activeSessions, err := metrics.RegisterUpDownCounter("sessions_active") - if err != nil { - return nil, fmt.Errorf("can't register session.active metric: %s", err) - } - totalSessions, err := 
metrics.RegisterCounter("sessions_total") - if err != nil { - return nil, fmt.Errorf("can't register session.total metric: %s", err) - } - +func New(timeout int64, parts int) (*SessionEnder, error) { return &SessionEnder{ - timeout: timeout, - sessions: make(map[uint64]*session), - timeCtrl: NewTimeController(parts), - activeSessions: activeSessions, - totalSessions: totalSessions, + timeout: timeout, + sessions: make(map[uint64]*session), + timeCtrl: NewTimeController(parts), }, nil } @@ -74,8 +56,8 @@ func (se *SessionEnder) UpdateSession(msg messages.Message) { lastUserTime: msgTimestamp, // last timestamp from user's machine isEnded: false, } - se.activeSessions.Add(context.Background(), 1) - se.totalSessions.Add(context.Background(), 1) + ender.IncreaseActiveSessions() + ender.IncreaseTotalSessions() return } // Keep the highest user's timestamp for correct session duration value @@ -100,7 +82,8 @@ func (se *SessionEnder) HandleEndedSessions(handler EndedSessionHandler) { sess.isEnded = true if handler(sessID, sess.lastUserTime) { delete(se.sessions, sessID) - se.activeSessions.Add(context.Background(), -1) + ender.DecreaseActiveSessions() + ender.IncreaseClosedSessions() removedSessions++ } else { log.Printf("sessID: %d, userTime: %d", sessID, sess.lastUserTime) diff --git a/backend/internal/sink/assetscache/assets.go b/backend/internal/sink/assetscache/assets.go index 4c63f6897..387ee5c92 100644 --- a/backend/internal/sink/assetscache/assets.go +++ b/backend/internal/sink/assetscache/assets.go @@ -1,20 +1,19 @@ package assetscache import ( - "context" "crypto/md5" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "io" "log" "net/url" - "openreplay/backend/internal/config/sink" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/queue/types" - "openreplay/backend/pkg/url/assets" + metrics "openreplay/backend/pkg/metrics/sink" "strings" "sync" "time" + + "openreplay/backend/internal/config/sink" + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/url/assets" ) type CachedAsset struct { @@ -23,52 +22,21 @@ type CachedAsset struct { } type AssetsCache struct { - mutex sync.RWMutex - cfg *sink.Config - rewriter *assets.Rewriter - producer types.Producer - cache map[string]*CachedAsset - blackList []string // use "example.com" to filter all domains or ".example.com" to filter only third-level domain - totalAssets syncfloat64.Counter - cachedAssets syncfloat64.Counter - skippedAssets syncfloat64.Counter - assetSize syncfloat64.Histogram - assetDuration syncfloat64.Histogram + mutex sync.RWMutex + cfg *sink.Config + rewriter *assets.Rewriter + producer types.Producer + cache map[string]*CachedAsset + blackList []string // use "example.com" to filter all domains or ".example.com" to filter only third-level domain } -func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, metrics *monitoring.Metrics) *AssetsCache { - // Assets metrics - totalAssets, err := metrics.RegisterCounter("assets_total") - if err != nil { - log.Printf("can't create assets_total metric: %s", err) - } - cachedAssets, err := metrics.RegisterCounter("assets_cached") - if err != nil { - log.Printf("can't create assets_cached metric: %s", err) - } - skippedAssets, err := metrics.RegisterCounter("assets_skipped") - if err != nil { - log.Printf("can't create assets_skipped metric: %s", err) - } - assetSize, err := metrics.RegisterHistogram("asset_size") - if err != nil { - log.Printf("can't create 
asset_size metric: %s", err) - } - assetDuration, err := metrics.RegisterHistogram("asset_duration") - if err != nil { - log.Printf("can't create asset_duration metric: %s", err) - } +func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer) *AssetsCache { assetsCache := &AssetsCache{ - cfg: cfg, - rewriter: rewriter, - producer: producer, - cache: make(map[string]*CachedAsset, 64), - blackList: make([]string, 0), - totalAssets: totalAssets, - cachedAssets: cachedAssets, - skippedAssets: skippedAssets, - assetSize: assetSize, - assetDuration: assetDuration, + cfg: cfg, + rewriter: rewriter, + producer: producer, + cache: make(map[string]*CachedAsset, 64), + blackList: make([]string, 0), } // Parse black list for cache layer if len(cfg.CacheBlackList) > 0 { @@ -84,7 +52,7 @@ func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, m } func (e *AssetsCache) cleaner() { - cleanTick := time.Tick(time.Minute * 30) + cleanTick := time.Tick(time.Minute * 3) for { select { case <-cleanTick: @@ -105,6 +73,7 @@ func (e *AssetsCache) clearCache() { if int64(now.Sub(cache.ts).Minutes()) > e.cfg.CacheExpiration { deleted++ delete(e.cache, id) + metrics.DecreaseCachedAssets() } } log.Printf("cache cleaner: deleted %d/%d assets", deleted, cacheSize) @@ -232,8 +201,7 @@ func parseHost(baseURL string) (string, error) { } func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) string { - ctx := context.Background() - e.totalAssets.Add(ctx, 1) + metrics.IncreaseTotalAssets() // Try to find asset in cache h := md5.New() // Cut first part of url (scheme + host) @@ -255,7 +223,7 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st e.mutex.RUnlock() if ok { if int64(time.Now().Sub(cachedAsset.ts).Minutes()) < e.cfg.CacheExpiration { - e.skippedAssets.Add(ctx, 1) + metrics.IncreaseSkippedAssets() return cachedAsset.msg } } @@ -267,8 +235,8 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st start := time.Now() res := e.getRewrittenCSS(sessionID, baseURL, css) duration := time.Now().Sub(start).Milliseconds() - e.assetSize.Record(ctx, float64(len(res))) - e.assetDuration.Record(ctx, float64(duration)) + metrics.RecordAssetSize(float64(len(res))) + metrics.RecordProcessAssetDuration(float64(duration)) // Save asset to cache if we spent more than threshold if duration > e.cfg.CacheThreshold { e.mutex.Lock() @@ -277,7 +245,7 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st ts: time.Now(), } e.mutex.Unlock() - e.cachedAssets.Add(ctx, 1) + metrics.IncreaseCachedAssets() } // Return rewritten asset return res diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index fbe9e2228..1e2507163 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -2,20 +2,20 @@ package storage import ( "bytes" - "context" "fmt" - gzip "github.com/klauspost/pgzip" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - config "openreplay/backend/internal/config/storage" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/storage" "os" "strconv" "strings" "sync" "time" + + config "openreplay/backend/internal/config/storage" + "openreplay/backend/pkg/messages" + metrics "openreplay/backend/pkg/metrics/storage" + "openreplay/backend/pkg/storage" + + gzip "github.com/klauspost/pgzip" ) type FileType string @@ -25,6 +25,13 @@ const ( DEV FileType = 
"/devtools.mob" ) +func (t FileType) String() string { + if t == DOM { + return "dom" + } + return "devtools" +} + type Task struct { id string doms *bytes.Buffer @@ -36,92 +43,23 @@ type Storage struct { cfg *config.Config s3 *storage.S3 startBytes []byte - - totalSessions syncfloat64.Counter - sessionDOMSize syncfloat64.Histogram - sessionDEVSize syncfloat64.Histogram - readingDOMTime syncfloat64.Histogram - readingDEVTime syncfloat64.Histogram - sortingDOMTime syncfloat64.Histogram - sortingDEVTime syncfloat64.Histogram - archivingDOMTime syncfloat64.Histogram - archivingDEVTime syncfloat64.Histogram - uploadingDOMTime syncfloat64.Histogram - uploadingDEVTime syncfloat64.Histogram - - tasks chan *Task - ready chan struct{} + tasks chan *Task + ready chan struct{} } -func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Storage, error) { +func New(cfg *config.Config, s3 *storage.S3) (*Storage, error) { switch { case cfg == nil: return nil, fmt.Errorf("config is empty") case s3 == nil: return nil, fmt.Errorf("s3 storage is empty") } - // Create metrics - totalSessions, err := metrics.RegisterCounter("sessions_total") - if err != nil { - log.Printf("can't create sessions_total metric: %s", err) - } - sessionDOMSize, err := metrics.RegisterHistogram("sessions_size") - if err != nil { - log.Printf("can't create session_size metric: %s", err) - } - sessionDevtoolsSize, err := metrics.RegisterHistogram("sessions_dt_size") - if err != nil { - log.Printf("can't create sessions_dt_size metric: %s", err) - } - readingDOMTime, err := metrics.RegisterHistogram("reading_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - readingDEVTime, err := metrics.RegisterHistogram("reading_dt_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - sortingDOMTime, err := metrics.RegisterHistogram("sorting_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - sortingDEVTime, err := metrics.RegisterHistogram("sorting_dt_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - archivingDOMTime, err := metrics.RegisterHistogram("archiving_duration") - if err != nil { - log.Printf("can't create archiving_duration metric: %s", err) - } - archivingDEVTime, err := metrics.RegisterHistogram("archiving_dt_duration") - if err != nil { - log.Printf("can't create archiving_duration metric: %s", err) - } - uploadingDOMTime, err := metrics.RegisterHistogram("uploading_duration") - if err != nil { - log.Printf("can't create uploading_duration metric: %s", err) - } - uploadingDEVTime, err := metrics.RegisterHistogram("uploading_dt_duration") - if err != nil { - log.Printf("can't create uploading_duration metric: %s", err) - } newStorage := &Storage{ - cfg: cfg, - s3: s3, - startBytes: make([]byte, cfg.FileSplitSize), - totalSessions: totalSessions, - sessionDOMSize: sessionDOMSize, - sessionDEVSize: sessionDevtoolsSize, - readingDOMTime: readingDOMTime, - readingDEVTime: readingDEVTime, - sortingDOMTime: sortingDOMTime, - sortingDEVTime: sortingDEVTime, - archivingDOMTime: archivingDOMTime, - archivingDEVTime: archivingDEVTime, - uploadingDOMTime: uploadingDOMTime, - uploadingDEVTime: uploadingDEVTime, - tasks: make(chan *Task, 1), - ready: make(chan struct{}), + cfg: cfg, + s3: s3, + startBytes: make([]byte, cfg.FileSplitSize), + tasks: make(chan *Task, 1), + ready: make(chan struct{}), } go newStorage.worker() return newStorage, nil @@ 
-187,11 +125,7 @@ func (s *Storage) openSession(filePath string, tp FileType) ([]byte, error) { if err != nil { return nil, fmt.Errorf("can't sort session, err: %s", err) } - if tp == DOM { - s.sortingDOMTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } else { - s.sortingDEVTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } + metrics.RecordSessionSortDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) return res, nil } @@ -215,26 +149,19 @@ func (s *Storage) prepareSession(path string, tp FileType, task *Task) error { if err != nil { return err } - durRead := time.Now().Sub(startRead).Milliseconds() - // Send metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - if tp == DOM { - s.sessionDOMSize.Record(ctx, float64(len(mob))) - s.readingDOMTime.Record(ctx, float64(durRead)) - } else { - s.sessionDEVSize.Record(ctx, float64(len(mob))) - s.readingDEVTime.Record(ctx, float64(durRead)) - } + metrics.RecordSessionSize(float64(len(mob)), tp.String()) + metrics.RecordSessionReadDuration(float64(time.Now().Sub(startRead).Milliseconds()), tp.String()) + // Encode and compress session if tp == DEV { - startCompress := time.Now() + start := time.Now() task.dev = s.compressSession(mob) - s.archivingDEVTime.Record(ctx, float64(time.Now().Sub(startCompress).Milliseconds())) + metrics.RecordSessionCompressDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) } else { if len(mob) <= s.cfg.FileSplitSize { - startCompress := time.Now() + start := time.Now() task.doms = s.compressSession(mob) - s.archivingDOMTime.Record(ctx, float64(time.Now().Sub(startCompress).Milliseconds())) + metrics.RecordSessionCompressDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) return nil } wg := &sync.WaitGroup{} @@ -253,7 +180,7 @@ func (s *Storage) prepareSession(path string, tp FileType, task *Task) error { wg.Done() }() wg.Wait() - s.archivingDOMTime.Record(ctx, float64(firstPart+secondPart)) + metrics.RecordSessionCompressDuration(float64(firstPart+secondPart), tp.String()) } return nil } @@ -324,11 +251,9 @@ func (s *Storage) uploadSession(task *Task) { wg.Done() }() wg.Wait() - // Record metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - s.uploadingDOMTime.Record(ctx, float64(uploadDoms+uploadDome)) - s.uploadingDEVTime.Record(ctx, float64(uploadDev)) - s.totalSessions.Add(ctx, 1) + metrics.RecordSessionUploadDuration(float64(uploadDoms+uploadDome), DOM.String()) + metrics.RecordSessionUploadDuration(float64(uploadDev), DEV.String()) + metrics.IncreaseStorageTotalSessions() } func (s *Storage) worker() { diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index c1283da10..abdee36f2 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -1,14 +1,13 @@ package postgres import ( - "context" - "github.com/jackc/pgx/v4" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - "openreplay/backend/pkg/monitoring" "strings" "time" + + "openreplay/backend/pkg/metrics/database" + + "github.com/jackc/pgx/v4" ) type batchItem struct { @@ -78,21 +77,17 @@ func NewBatchesTask(size int) *batchesTask { } type BatchSet struct { - c Pool - batches map[uint64]*SessionBatch - batchQueueLimit int - batchSizeLimit int - batchSizeBytes syncfloat64.Histogram - batchSizeLines syncfloat64.Histogram - sqlRequestTime syncfloat64.Histogram - 
sqlRequestCounter syncfloat64.Counter - updates map[uint64]*sessionUpdates - workerTask chan *batchesTask - done chan struct{} - finished chan struct{} + c Pool + batches map[uint64]*SessionBatch + batchQueueLimit int + batchSizeLimit int + updates map[uint64]*sessionUpdates + workerTask chan *batchesTask + done chan struct{} + finished chan struct{} } -func NewBatchSet(c Pool, queueLimit, sizeLimit int, metrics *monitoring.Metrics) *BatchSet { +func NewBatchSet(c Pool, queueLimit, sizeLimit int) *BatchSet { bs := &BatchSet{ c: c, batches: make(map[uint64]*SessionBatch), @@ -103,31 +98,10 @@ func NewBatchSet(c Pool, queueLimit, sizeLimit int, metrics *monitoring.Metrics) finished: make(chan struct{}), updates: make(map[uint64]*sessionUpdates), } - bs.initMetrics(metrics) go bs.worker() return bs } -func (conn *BatchSet) initMetrics(metrics *monitoring.Metrics) { - var err error - conn.batchSizeBytes, err = metrics.RegisterHistogram("batch_size_bytes") - if err != nil { - log.Printf("can't create batchSizeBytes metric: %s", err) - } - conn.batchSizeLines, err = metrics.RegisterHistogram("batch_size_lines") - if err != nil { - log.Printf("can't create batchSizeLines metric: %s", err) - } - conn.sqlRequestTime, err = metrics.RegisterHistogram("sql_request_time") - if err != nil { - log.Printf("can't create sqlRequestTime metric: %s", err) - } - conn.sqlRequestCounter, err = metrics.RegisterCounter("sql_request_number") - if err != nil { - log.Printf("can't create sqlRequestNumber metric: %s", err) - } -} - func (conn *BatchSet) getBatch(sessionID uint64) *SessionBatch { sessionID = sessionID % 10 if _, ok := conn.batches[sessionID]; !ok { @@ -194,11 +168,10 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { // Append session update sql request to the end of batch batch.Prepare() // Record batch size in bytes and number of lines - conn.batchSizeBytes.Record(context.Background(), float64(batch.Size())) - conn.batchSizeLines.Record(context.Background(), float64(batch.Len())) + database.RecordBatchSize(float64(batch.Size())) + database.RecordBatchElements(float64(batch.Len())) start := time.Now() - isFailed := false // Send batch to db and execute br := conn.c.SendBatch(batch.batch) @@ -209,15 +182,11 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { failedSql := batch.items[i] query := strings.ReplaceAll(failedSql.query, "\n", " ") log.Println("failed sql req:", query, failedSql.arguments) - isFailed = true } } br.Close() // returns err - dur := time.Now().Sub(start).Milliseconds() - conn.sqlRequestTime.Record(context.Background(), float64(dur), - attribute.String("method", "batch"), attribute.Bool("failed", isFailed)) - conn.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "batch"), attribute.Bool("failed", isFailed)) + database.RecordBatchInsertDuration(float64(time.Now().Sub(start).Milliseconds())) + database.IncreaseTotalBatches() } } diff --git a/backend/pkg/db/postgres/bulk.go b/backend/pkg/db/postgres/bulk.go index 8c6c42f78..b6a2ddd35 100644 --- a/backend/pkg/db/postgres/bulk.go +++ b/backend/pkg/db/postgres/bulk.go @@ -2,13 +2,9 @@ package postgres import ( "bytes" - "context" "errors" "fmt" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" - "log" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics/database" "time" ) @@ -25,15 +21,13 @@ type Bulk interface { } type bulkImpl struct { - conn Pool - table string - columns string - template string - setSize int - sizeLimit int - values 
[]interface{} - bulkSize syncfloat64.Histogram - bulkDuration syncfloat64.Histogram + conn Pool + table string + columns string + template string + setSize int + sizeLimit int + values []interface{} } func (b *bulkImpl) Append(args ...interface{}) error { @@ -79,18 +73,15 @@ func (b *bulkImpl) send() error { return fmt.Errorf("send bulk err: %s", err) } // Save bulk metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - b.bulkDuration.Record(ctx, float64(time.Now().Sub(start).Milliseconds()), attribute.String("table", b.table)) - b.bulkSize.Record(ctx, float64(size), attribute.String("table", b.table)) + database.RecordBulkElements(float64(size), "pg", b.table) + database.RecordBulkInsertDuration(float64(time.Now().Sub(start).Milliseconds()), "pg", b.table) return nil } -func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { +func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { switch { case conn == nil: return nil, errors.New("db conn is empty") - case metrics == nil: - return nil, errors.New("metrics is empty") case table == "": return nil, errors.New("table is empty") case columns == "": @@ -102,23 +93,13 @@ func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template st case sizeLimit <= 0: return nil, errors.New("size limit is wrong") } - messagesInBulk, err := metrics.RegisterHistogram("messages_in_bulk") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } - bulkInsertDuration, err := metrics.RegisterHistogram("bulk_insert_duration") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } return &bulkImpl{ - conn: conn, - table: table, - columns: columns, - template: template, - setSize: setSize, - sizeLimit: sizeLimit, - values: make([]interface{}, 0, setSize*sizeLimit), - bulkSize: messagesInBulk, - bulkDuration: bulkInsertDuration, + conn: conn, + table: table, + columns: columns, + template: template, + setSize: setSize, + sizeLimit: sizeLimit, + values: make([]interface{}, 0, setSize*sizeLimit), }, nil } diff --git a/backend/pkg/db/postgres/bulks.go b/backend/pkg/db/postgres/bulks.go index 5774ba184..f3e9e95c9 100644 --- a/backend/pkg/db/postgres/bulks.go +++ b/backend/pkg/db/postgres/bulks.go @@ -2,7 +2,6 @@ package postgres import ( "log" - "openreplay/backend/pkg/monitoring" "time" ) @@ -30,16 +29,14 @@ type BulkSet struct { webCustomEvents Bulk webClickEvents Bulk webNetworkRequest Bulk - metrics *monitoring.Metrics workerTask chan *bulksTask done chan struct{} finished chan struct{} } -func NewBulkSet(c Pool, metrics *monitoring.Metrics) *BulkSet { +func NewBulkSet(c Pool) *BulkSet { bs := &BulkSet{ c: c, - metrics: metrics, workerTask: make(chan *bulksTask, 1), done: make(chan struct{}), finished: make(chan struct{}), @@ -86,7 +83,7 @@ func (conn *BulkSet) Get(name string) Bulk { func (conn *BulkSet) initBulks() { var err error - conn.autocompletes, err = NewBulk(conn.c, conn.metrics, + conn.autocompletes, err = NewBulk(conn.c, "autocomplete", "(value, type, project_id)", "($%d, $%d, $%d)", @@ -94,7 +91,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create autocomplete bulk: %s", err) } - conn.requests, err = NewBulk(conn.c, conn.metrics, + conn.requests, err = NewBulk(conn.c, "events_common.requests", "(session_id, timestamp, seq_index, url, duration, success)", "($%d, $%d, $%d, LEFT($%d, 8000), $%d, $%d)", @@ -102,7 +99,7 @@ func 
(conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create requests bulk: %s", err) } - conn.customEvents, err = NewBulk(conn.c, conn.metrics, + conn.customEvents, err = NewBulk(conn.c, "events_common.customs", "(session_id, timestamp, seq_index, name, payload)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d)", @@ -110,7 +107,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create customEvents bulk: %s", err) } - conn.webPageEvents, err = NewBulk(conn.c, conn.metrics, + conn.webPageEvents, err = NewBulk(conn.c, "events.pages", "(session_id, message_id, timestamp, referrer, base_referrer, host, path, query, dom_content_loaded_time, "+ "load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, "+ @@ -122,7 +119,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webInputEvents, err = NewBulk(conn.c, conn.metrics, + conn.webInputEvents, err = NewBulk(conn.c, "events.inputs", "(session_id, message_id, timestamp, value, label)", "($%d, $%d, $%d, LEFT($%d, 2000), NULLIF(LEFT($%d, 2000),''))", @@ -130,7 +127,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webGraphQL, err = NewBulk(conn.c, conn.metrics, + conn.webGraphQL, err = NewBulk(conn.c, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)", @@ -138,7 +135,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webErrors, err = NewBulk(conn.c, conn.metrics, + conn.webErrors, err = NewBulk(conn.c, "errors", "(error_id, project_id, source, name, message, payload)", "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)", @@ -146,7 +143,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrors bulk: %s", err) } - conn.webErrorEvents, err = NewBulk(conn.c, conn.metrics, + conn.webErrorEvents, err = NewBulk(conn.c, "events.errors", "(session_id, message_id, timestamp, error_id)", "($%d, $%d, $%d, $%d)", @@ -154,7 +151,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webErrorTags, err = NewBulk(conn.c, conn.metrics, + conn.webErrorTags, err = NewBulk(conn.c, "public.errors_tags", "(session_id, message_id, error_id, key, value)", "($%d, $%d, $%d, $%d, $%d)", @@ -162,7 +159,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webIssues, err = NewBulk(conn.c, conn.metrics, + conn.webIssues, err = NewBulk(conn.c, "issues", "(project_id, issue_id, type, context_string)", "($%d, $%d, $%d, $%d)", @@ -170,7 +167,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webIssues bulk: %s", err) } - conn.webIssueEvents, err = NewBulk(conn.c, conn.metrics, + conn.webIssueEvents, err = NewBulk(conn.c, "events_common.issues", "(session_id, issue_id, timestamp, seq_index, payload)", "($%d, $%d, $%d, $%d, CAST($%d AS jsonb))", @@ -178,7 +175,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webIssueEvents bulk: %s", err) } - conn.webCustomEvents, err = NewBulk(conn.c, conn.metrics, + conn.webCustomEvents, err = NewBulk(conn.c, "events_common.customs", "(session_id, seq_index, timestamp, name, payload, level)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)", @@ -186,7 +183,7 @@ func (conn *BulkSet) 
initBulks() { if err != nil { log.Fatalf("can't create webCustomEvents bulk: %s", err) } - conn.webClickEvents, err = NewBulk(conn.c, conn.metrics, + conn.webClickEvents, err = NewBulk(conn.c, "events.clicks", "(session_id, message_id, timestamp, label, selector, url, path)", "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000))", @@ -194,7 +191,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webClickEvents bulk: %s", err) } - conn.webNetworkRequest, err = NewBulk(conn.c, conn.metrics, + conn.webNetworkRequest, err = NewBulk(conn.c, "events_common.requests", "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success)", "($%d, $%d, $%d, LEFT($%d, 8000), LEFT($%d, 300), LEFT($%d, 2000), LEFT($%d, 8000), $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d)", diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 2e8f3d425..6904dc135 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -2,11 +2,10 @@ package postgres import ( "context" - "github.com/jackc/pgx/v4/pgxpool" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" + + "github.com/jackc/pgx/v4/pgxpool" "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/monitoring" ) type CH interface { @@ -15,36 +14,28 @@ type CH interface { // Conn contains batches, bulks and cache for all sessions type Conn struct { - c Pool - batches *BatchSet - bulks *BulkSet - batchSizeBytes syncfloat64.Histogram - batchSizeLines syncfloat64.Histogram - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter - chConn CH + c Pool + batches *BatchSet + bulks *BulkSet + chConn CH } func (conn *Conn) SetClickHouse(ch CH) { conn.chConn = ch } -func NewConn(url string, queueLimit, sizeLimit int, metrics *monitoring.Metrics) *Conn { - if metrics == nil { - log.Fatalf("metrics is nil") - } +func NewConn(url string, queueLimit, sizeLimit int) *Conn { c, err := pgxpool.Connect(context.Background(), url) if err != nil { log.Fatalf("pgxpool.Connect err: %s", err) } conn := &Conn{} - conn.initMetrics(metrics) - conn.c, err = NewPool(c, conn.sqlRequestTime, conn.sqlRequestCounter) + conn.c, err = NewPool(c) if err != nil { log.Fatalf("can't create new pool wrapper: %s", err) } - conn.bulks = NewBulkSet(conn.c, metrics) - conn.batches = NewBatchSet(conn.c, queueLimit, sizeLimit, metrics) + conn.bulks = NewBulkSet(conn.c) + conn.batches = NewBatchSet(conn.c, queueLimit, sizeLimit) return conn } @@ -55,26 +46,6 @@ func (conn *Conn) Close() error { return nil } -func (conn *Conn) initMetrics(metrics *monitoring.Metrics) { - var err error - conn.batchSizeBytes, err = metrics.RegisterHistogram("batch_size_bytes") - if err != nil { - log.Printf("can't create batchSizeBytes metric: %s", err) - } - conn.batchSizeLines, err = metrics.RegisterHistogram("batch_size_lines") - if err != nil { - log.Printf("can't create batchSizeLines metric: %s", err) - } - conn.sqlRequestTime, err = metrics.RegisterHistogram("sql_request_time") - if err != nil { - log.Printf("can't create sqlRequestTime metric: %s", err) - } - conn.sqlRequestCounter, err = metrics.RegisterCounter("sql_request_number") - if err != nil { - log.Printf("can't create sqlRequestNumber metric: %s", err) - } -} - func (conn *Conn) insertAutocompleteValue(sessionID uint64, projectID uint32, tp string, value string) { if len(value) == 0 { return diff --git 
a/backend/pkg/db/postgres/pool.go b/backend/pkg/db/postgres/pool.go index 5f9cbaa29..5214be8d0 100644 --- a/backend/pkg/db/postgres/pool.go +++ b/backend/pkg/db/postgres/pool.go @@ -3,12 +3,12 @@ package postgres import ( "context" "errors" - "github.com/jackc/pgx/v4" - "github.com/jackc/pgx/v4/pgxpool" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "strings" "time" + + "github.com/jackc/pgx/v4" + "github.com/jackc/pgx/v4/pgxpool" + "openreplay/backend/pkg/metrics/database" ) // Pool is a pgx.Pool wrapper with metrics integration @@ -22,19 +22,15 @@ type Pool interface { } type poolImpl struct { - conn *pgxpool.Pool - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter + conn *pgxpool.Pool } func (p *poolImpl) Query(sql string, args ...interface{}) (pgx.Rows, error) { start := time.Now() res, err := p.conn.Query(getTimeoutContext(), sql, args...) method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return res, err } @@ -42,10 +38,8 @@ func (p *poolImpl) QueryRow(sql string, args ...interface{}) pgx.Row { start := time.Now() res := p.conn.QueryRow(getTimeoutContext(), sql, args...) method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return res } @@ -53,45 +47,37 @@ func (p *poolImpl) Exec(sql string, arguments ...interface{}) error { start := time.Now() _, err := p.conn.Exec(getTimeoutContext(), sql, arguments...) 
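// Every query helper in this wrapper follows the same pattern: time the call, parse method/table labels out of the SQL text, then record a duration sample and bump the request counter.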
method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return err } func (p *poolImpl) SendBatch(b *pgx.Batch) pgx.BatchResults { start := time.Now() res := p.conn.SendBatch(getTimeoutContext(), b) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "sendBatch")) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "sendBatch")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "sendBatch", "") + database.IncreaseTotalRequests("sendBatch", "") return res } func (p *poolImpl) Begin() (*_Tx, error) { start := time.Now() tx, err := p.conn.Begin(context.Background()) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "begin")) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "begin")) - return &_Tx{tx, p.sqlRequestTime, p.sqlRequestCounter}, err + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "") + database.IncreaseTotalRequests("begin", "") + return &_Tx{tx}, err } func (p *poolImpl) Close() { p.conn.Close() } -func NewPool(conn *pgxpool.Pool, sqlRequestTime syncfloat64.Histogram, sqlRequestCounter syncfloat64.Counter) (Pool, error) { +func NewPool(conn *pgxpool.Pool) (Pool, error) { if conn == nil { return nil, errors.New("conn is empty") } return &poolImpl{ - conn: conn, - sqlRequestTime: sqlRequestTime, - sqlRequestCounter: sqlRequestCounter, + conn: conn, }, nil } @@ -99,38 +85,30 @@ func NewPool(conn *pgxpool.Pool, sqlRequestTime syncfloat64.Histogram, sqlReques type _Tx struct { pgx.Tx - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter } func (tx *_Tx) exec(sql string, args ...interface{}) error { start := time.Now() _, err := tx.Exec(context.Background(), sql, args...) 
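// Statements issued inside a transaction are instrumented identically; failed calls are still timed and counted, since the new database metrics carry no failure label.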
method, table := methodName(sql) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return err } func (tx *_Tx) rollback() error { start := time.Now() err := tx.Rollback(context.Background()) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "rollback")) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "rollback")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "") + database.IncreaseTotalRequests("rollback", "") return err } func (tx *_Tx) commit() error { start := time.Now() err := tx.Commit(context.Background()) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "commit")) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "commit")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "") + database.IncreaseTotalRequests("commit", "") return err } @@ -169,7 +147,8 @@ func methodName(sql string) (string, string) { case "update": table = strings.TrimSpace(parts[1]) case "insert": - table = strings.TrimSpace(parts[2]) + tableNameParts := strings.Split(strings.TrimSpace(parts[2]), "(") + table = tableNameParts[0] } return cmd, table } diff --git a/backend/pkg/messages/iterator-sink.go b/backend/pkg/messages/iterator-sink.go index a5897c3b7..be12b63eb 100644 --- a/backend/pkg/messages/iterator-sink.go +++ b/backend/pkg/messages/iterator-sink.go @@ -3,6 +3,7 @@ package messages import ( "fmt" "log" + "openreplay/backend/pkg/metrics/sink" ) type sinkMessageIteratorImpl struct { @@ -53,6 +54,8 @@ func (i *sinkMessageIteratorImpl) sendBatchEnd() { } func (i *sinkMessageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { + sink.RecordBatchSize(float64(len(batchData))) + sink.IncreaseTotalBatches() // Create new message reader reader := NewMessageReader(batchData) diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go index a6717257e..f7b014d30 100644 --- a/backend/pkg/messages/iterator.go +++ b/backend/pkg/messages/iterator.go @@ -74,12 +74,13 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { i.messageInfo.Index++ msg := reader.Message() + msgType := msg.TypeID() // Preprocess "system" messages if _, ok := i.preFilter[msg.TypeID()]; ok { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info()) return } msg = transformDeprecated(msg) @@ -99,7 +100,7 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { if i.autoDecode { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info()) return } } diff --git a/backend/pkg/metrics/assets/metrics.go b/backend/pkg/metrics/assets/metrics.go new file mode 100644 index 000000000..44af0dfa9 --- /dev/null +++ 
b/backend/pkg/metrics/assets/metrics.go @@ -0,0 +1,72 @@ +package assets + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" + "strconv" +) + +var assetsProcessedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "assets", + Name: "processed_total", + Help: "A counter displaying the total count of processed assets.", + }, +) + +func IncreaseProcessesSessions() { + assetsProcessedSessions.Inc() +} + +var assetsSavedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "assets", + Name: "saved_total", + Help: "A counter displaying the total number of cached assets.", + }, +) + +func IncreaseSavedSessions() { + assetsSavedSessions.Inc() +} + +var assetsDownloadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "assets", + Name: "download_duration_seconds", + Help: "A histogram displaying the duration of downloading for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"response_code"}, +) + +func RecordDownloadDuration(durMillis float64, code int) { + assetsDownloadDuration.WithLabelValues(strconv.Itoa(code)).Observe(durMillis / 1000.0) +} + +var assetsUploadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "assets", + Name: "upload_s3_duration_seconds", + Help: "A histogram displaying the duration of uploading to s3 for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"failed"}, +) + +func RecordUploadDuration(durMillis float64, isFailed bool) { + failed := "false" + if isFailed { + failed = "true" + } + assetsUploadDuration.WithLabelValues(failed).Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + assetsProcessedSessions, + assetsSavedSessions, + assetsDownloadDuration, + assetsUploadDuration, + } +} diff --git a/backend/pkg/metrics/common/metrics.go b/backend/pkg/metrics/common/metrics.go new file mode 100644 index 000000000..85b66c713 --- /dev/null +++ b/backend/pkg/metrics/common/metrics.go @@ -0,0 +1,11 @@ +package common + +// DefaultDurationBuckets is a set of buckets from 5 milliseconds to 1000 seconds (16.6667 minutes) +var DefaultDurationBuckets = []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 25, 50, 100, 250, 500, 1000} + +// DefaultSizeBuckets is a set of buckets from 1 byte to 1_000_000_000 bytes (~1 Gb) +var DefaultSizeBuckets = []float64{1, 10, 50, 100, 250, 500, 1000, 2500, 5000, 10000, 25000, 50000, 100_000, 250_000, + 500_000, 1_000_000, 10_000_000, 100_000_000, 1_000_000_000} + +// DefaultBuckets is a set of buckets from 1 to 1_000_000 elements +var DefaultBuckets = []float64{1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10_000, 50_000, 100_000, 1_000_000} diff --git a/backend/pkg/metrics/database/metrics.go b/backend/pkg/metrics/database/metrics.go new file mode 100644 index 000000000..a9f3990cd --- /dev/null +++ b/backend/pkg/metrics/database/metrics.go @@ -0,0 +1,127 @@ +package database + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var dbBatchSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "batch_size_bytes", + Help: "A histogram displaying the batch size in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordBatchSize(size float64) { + dbBatchSize.Observe(size) +} + +var dbBatchElements = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: 
"db", + Name: "batch_size_elements", + Help: "A histogram displaying the number of SQL commands in each batch.", + Buckets: common.DefaultBuckets, + }, +) + +func RecordBatchElements(number float64) { + dbBatchElements.Observe(number) +} + +var dbBatchInsertDuration = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "batch_insert_duration_seconds", + Help: "A histogram displaying the duration of batch inserts in seconds.", + Buckets: common.DefaultDurationBuckets, + }, +) + +func RecordBatchInsertDuration(durMillis float64) { + dbBatchInsertDuration.Observe(durMillis / 1000.0) +} + +var dbBulkSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_size_bytes", + Help: "A histogram displaying the bulk size in bytes.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkSize(size float64, db, table string) { + dbBulkSize.WithLabelValues(db, table).Observe(size) +} + +var dbBulkElements = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_size_elements", + Help: "A histogram displaying the size of data set in each bulk.", + Buckets: common.DefaultBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkElements(size float64, db, table string) { + dbBulkElements.WithLabelValues(db, table).Observe(size) +} + +var dbBulkInsertDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_insert_duration_seconds", + Help: "A histogram displaying the duration of bulk inserts in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkInsertDuration(durMillis float64, db, table string) { + dbBulkInsertDuration.WithLabelValues(db, table).Observe(durMillis / 1000.0) +} + +var dbRequestDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "request_duration_seconds", + Help: "A histogram displaying the duration of each sql request in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"method", "table"}, +) + +func RecordRequestDuration(durMillis float64, method, table string) { + dbRequestDuration.WithLabelValues(method, table).Observe(durMillis / 1000.0) +} + +var dbTotalRequests = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Namespace: "db", + Name: "requests_total", + Help: "A counter showing the total number of all SQL requests.", + }, + []string{"method", "table"}, +) + +func IncreaseTotalRequests(method, table string) { + dbTotalRequests.WithLabelValues(method, table).Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + dbBatchSize, + dbBatchElements, + dbBatchInsertDuration, + dbBulkSize, + dbBulkElements, + dbBulkInsertDuration, + dbRequestDuration, + dbTotalRequests, + } +} diff --git a/backend/pkg/metrics/ender/metrics.go b/backend/pkg/metrics/ender/metrics.go new file mode 100644 index 000000000..5e3308554 --- /dev/null +++ b/backend/pkg/metrics/ender/metrics.go @@ -0,0 +1,51 @@ +package ender + +import "github.com/prometheus/client_golang/prometheus" + +var enderActiveSessions = prometheus.NewGauge( + prometheus.GaugeOpts{ + Namespace: "ender", + Name: "sessions_active", + Help: "A gauge displaying the number of active (live) sessions.", + }, +) + +func IncreaseActiveSessions() { + enderActiveSessions.Inc() +} + +func DecreaseActiveSessions() { + enderActiveSessions.Dec() +} + +var enderClosedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + 
Namespace: "ender", + Name: "sessions_closed", + Help: "A counter displaying the number of closed sessions (sent SessionEnd).", + }, +) + +func IncreaseClosedSessions() { + enderClosedSessions.Inc() +} + +var enderTotalSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "ender", + Name: "sessions_total", + Help: "A counter displaying the number of all processed sessions.", + }, +) + +func IncreaseTotalSessions() { + enderTotalSessions.Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + enderActiveSessions, + enderClosedSessions, + enderTotalSessions, + } +} diff --git a/backend/pkg/metrics/http/metrics.go b/backend/pkg/metrics/http/metrics.go new file mode 100644 index 000000000..7a835d7f6 --- /dev/null +++ b/backend/pkg/metrics/http/metrics.go @@ -0,0 +1,55 @@ +package http + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" + "strconv" +) + +var httpRequestSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "http", + Name: "request_size_bytes", + Help: "A histogram displaying the size of each HTTP request in bytes.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"url", "response_code"}, +) + +func RecordRequestSize(size float64, url string, code int) { + httpRequestSize.WithLabelValues(url, strconv.Itoa(code)).Observe(size) +} + +var httpRequestDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "http", + Name: "request_duration_seconds", + Help: "A histogram displaying the duration of each HTTP request in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"url", "response_code"}, +) + +func RecordRequestDuration(durMillis float64, url string, code int) { + httpRequestDuration.WithLabelValues(url, strconv.Itoa(code)).Observe(durMillis / 1000.0) +} + +var httpTotalRequests = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "http", + Name: "requests_total", + Help: "A counter displaying the number of all HTTP requests.", + }, +) + +func IncreaseTotalRequests() { + httpTotalRequests.Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + httpRequestSize, + httpRequestDuration, + httpTotalRequests, + } +} diff --git a/backend/pkg/metrics/server.go b/backend/pkg/metrics/server.go new file mode 100644 index 000000000..fb3be5afc --- /dev/null +++ b/backend/pkg/metrics/server.go @@ -0,0 +1,40 @@ +package metrics + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/collectors" + "github.com/prometheus/client_golang/prometheus/promhttp" + "log" + "net/http" +) + +type MetricServer struct { + registry *prometheus.Registry +} + +func New() *MetricServer { + registry := prometheus.NewRegistry() + // Add go runtime metrics and process collectors. + registry.MustRegister( + collectors.NewGoCollector(), + collectors.NewProcessCollector(collectors.ProcessCollectorOpts{}), + ) + // Expose /metrics HTTP endpoint using the created custom registry. + http.Handle( + "/metrics", promhttp.HandlerFor( + registry, + promhttp.HandlerOpts{ + EnableOpenMetrics: true, + }), + ) + go func() { + log.Println(http.ListenAndServe(":8888", nil)) + }() + return &MetricServer{ + registry: registry, + } +} + +func (s *MetricServer) Register(cs []prometheus.Collector) { + s.registry.MustRegister(cs...) +}
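The service entry points that wire these collectors up are not part of this diff; what follows is a minimal sketch of the intended usage, assuming only the metrics.New constructor, (*MetricServer).Register, and the per-package List helpers introduced above (the main function itself is hypothetical):

package main

import (
	"openreplay/backend/pkg/metrics"
	"openreplay/backend/pkg/metrics/database"
	"openreplay/backend/pkg/metrics/sink"
)

func main() {
	// Starts the /metrics endpoint on :8888 with the Go runtime and process collectors.
	m := metrics.New()
	// Each service registers only the collector sets it uses; the distinct
	// namespaces ("db", "sink", ...) keep the metric names from colliding.
	m.Register(database.List())
	m.Register(sink.List())
	// ... the rest of the service setup would follow here ...
}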
diff --git a/backend/pkg/metrics/sink/metrics.go b/backend/pkg/metrics/sink/metrics.go new file mode 100644 index 000000000..52cb73ba1 --- /dev/null +++ b/backend/pkg/metrics/sink/metrics.go @@ -0,0 +1,185 @@ +package sink + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var sinkMessageSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "message_size_bytes", + Help: "A histogram displaying the size of each message in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordMessageSize(size float64) { + sinkMessageSize.Observe(size) +} + +var sinkWrittenMessages = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "messages_written", + Help: "A counter displaying the total number of all written messages.", + }, +) + +func IncreaseWrittenMessages() { + sinkWrittenMessages.Inc() +} + +var sinkTotalMessages = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "messages_total", + Help: "A counter displaying the total number of all processed messages.", + }, +) + +func IncreaseTotalMessages() { + sinkTotalMessages.Inc() +} + +var sinkBatchSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "batch_size_bytes", + Help: "A histogram displaying the size of each batch in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordBatchSize(size float64) { + sinkBatchSize.Observe(size) +} + +var sinkTotalBatches = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "batches_total", + Help: "A counter displaying the total number of all written batches.", + }, +) + +func IncreaseTotalBatches() { + sinkTotalBatches.Inc() +} + +var sinkWrittenBytes = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "written_bytes", + Help: "A histogram displaying the size of each buffer written to a session file, in bytes.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"file_type"}, +) + +func RecordWrittenBytes(size float64, fileType string) { + if size == 0 { + return + } + sinkWrittenBytes.WithLabelValues(fileType).Observe(size) + IncreaseTotalWrittenBytes(size, fileType) +} + +var sinkTotalWrittenBytes = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "written_bytes_total", + Help: "A counter displaying the total number of bytes written to all session files.", + }, + []string{"file_type"}, +) + +func IncreaseTotalWrittenBytes(size float64, fileType string) { + if size == 0 { + return + } + sinkTotalWrittenBytes.WithLabelValues(fileType).Add(size) +} + +var sinkCachedAssets = prometheus.NewGauge( + prometheus.GaugeOpts{ + Namespace: "sink", + Name: "assets_cached", + Help: "A gauge displaying the current number of cached assets.", + }, +) + +func IncreaseCachedAssets() { + sinkCachedAssets.Inc() +} + +func DecreaseCachedAssets() { + sinkCachedAssets.Dec() +} + +var sinkSkippedAssets = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "assets_skipped", + Help: "A counter displaying the total number of all skipped assets.", + }, +) + +func IncreaseSkippedAssets() { + sinkSkippedAssets.Inc() +} + +var sinkTotalAssets = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "assets_total", + Help: "A counter displaying the total number of all processed assets.", + }, +) + +func IncreaseTotalAssets() { + sinkTotalAssets.Inc() +} + +var sinkAssetSize =
prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "asset_size_bytes", + Help: "A histogram displaying the size of each asset in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordAssetSize(size float64) { + sinkAssetSize.Observe(size) +} + +var sinkProcessAssetDuration = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "asset_process_duration_seconds", + Help: "A histogram displaying the duration of processing for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, +) + +func RecordProcessAssetDuration(durMillis float64) { + sinkProcessAssetDuration.Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + sinkMessageSize, + sinkWrittenMessages, + sinkTotalMessages, + sinkBatchSize, + sinkTotalBatches, + sinkWrittenBytes, + sinkTotalWrittenBytes, + sinkCachedAssets, + sinkSkippedAssets, + sinkTotalAssets, + sinkAssetSize, + sinkProcessAssetDuration, + } +} diff --git a/backend/pkg/metrics/storage/metrics.go b/backend/pkg/metrics/storage/metrics.go new file mode 100644 index 000000000..26459c90d --- /dev/null +++ b/backend/pkg/metrics/storage/metrics.go @@ -0,0 +1,114 @@ +package storage + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var storageSessionSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "session_size_bytes", + Help: "A histogram displaying the size of each session file in bytes prior to any manipulation.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionSize(fileSize float64, fileType string) { + storageSessionSize.WithLabelValues(fileType).Observe(fileSize) +} + +var storageTotalSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "storage", + Name: "sessions_total", + Help: "A counter displaying the total number of all processed sessions.", + }, +) + +func IncreaseStorageTotalSessions() { + storageTotalSessions.Inc() +} + +var storageSessionReadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "read_duration_seconds", + Help: "A histogram displaying the duration of reading for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionReadDuration(durMillis float64, fileType string) { + storageSessionReadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionSortDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "sort_duration_seconds", + Help: "A histogram displaying the duration of sorting for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionSortDuration(durMillis float64, fileType string) { + storageSessionSortDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionEncodeDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "encode_duration_seconds", + Help: "A histogram displaying the duration of encoding for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionEncodeDuration(durMillis float64, fileType string) { + storageSessionEncodeDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionCompressDuration = 
prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "compress_duration_seconds", + Help: "A histogram displaying the duration of compressing for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionCompressDuration(durMillis float64, fileType string) { + storageSessionCompressDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionUploadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "upload_duration_seconds", + Help: "A histogram displaying the duration of uploading to s3 for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionUploadDuration(durMillis float64, fileType string) { + storageSessionUploadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + storageSessionSize, + storageTotalSessions, + storageSessionReadDuration, + storageSessionSortDuration, + storageSessionEncodeDuration, + storageSessionCompressDuration, + storageSessionUploadDuration, + } +} diff --git a/ee/backend/pkg/db/clickhouse/bulk.go b/ee/backend/pkg/db/clickhouse/bulk.go index 706b66f68..6eb8d98fd 100644 --- a/ee/backend/pkg/db/clickhouse/bulk.go +++ b/ee/backend/pkg/db/clickhouse/bulk.go @@ -5,6 +5,8 @@ import ( "errors" "fmt" "log" + "openreplay/backend/pkg/metrics/database" + "time" "github.com/ClickHouse/clickhouse-go/v2/lib/driver" ) @@ -16,19 +18,23 @@ type Bulk interface { type bulkImpl struct { conn driver.Conn + table string query string values [][]interface{} } -func NewBulk(conn driver.Conn, query string) (Bulk, error) { +func NewBulk(conn driver.Conn, table, query string) (Bulk, error) { switch { case conn == nil: return nil, errors.New("clickhouse connection is empty") + case table == "": + return nil, errors.New("table is empty") case query == "": return nil, errors.New("query is empty") } return &bulkImpl{ conn: conn, + table: table, query: query, values: make([][]interface{}, 0), }, nil @@ -40,6 +46,7 @@ func (b *bulkImpl) Append(args ...interface{}) error { } func (b *bulkImpl) Send() error { + start := time.Now() batch, err := b.conn.PrepareBatch(context.Background(), b.query) if err != nil { return fmt.Errorf("can't create new batch: %s", err) @@ -50,6 +57,11 @@ func (b *bulkImpl) Send() error { log.Printf("failed query: %s", b.query) } } + err = batch.Send() + // Save bulk metrics + database.RecordBulkElements(float64(len(b.values)), "ch", b.table) + database.RecordBulkInsertDuration(float64(time.Now().Sub(start).Milliseconds()), "ch", b.table) + // Prepare values slice for a new data b.values = make([][]interface{}, 0) - return batch.Send() + return err } diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go index 157d384b9..b872adcc2 100644 --- a/ee/backend/pkg/db/clickhouse/connector.go +++ b/ee/backend/pkg/db/clickhouse/connector.go @@ -3,18 +3,16 @@ package clickhouse import ( "errors" "fmt" + "github.com/ClickHouse/clickhouse-go/v2" + "github.com/ClickHouse/clickhouse-go/v2/lib/driver" "log" "openreplay/backend/pkg/db/types" "openreplay/backend/pkg/hashid" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/url" - "os" "strings" "time" - "github.com/ClickHouse/clickhouse-go/v2" - "github.com/ClickHouse/clickhouse-go/v2/lib/driver" - "openreplay/backend/pkg/license" ) @@ -52,28 +50,14 @@ type 
connectorImpl struct { finished chan struct{} } -// Check env variables. If not present, return default value. -func getEnv(key, fallback string) string { - if value, ok := os.LookupEnv(key); ok { - return value - } - return fallback -} - func NewConnector(url string) Connector { license.CheckLicense() - // Check username, password, database - userName := getEnv("CH_USERNAME", "default") - password := getEnv("CH_PASSWORD", "") - database := getEnv("CH_DATABASE", "default") url = strings.TrimPrefix(url, "tcp://") - url = strings.TrimSuffix(url, "/"+database) + url = strings.TrimSuffix(url, "/default") conn, err := clickhouse.Open(&clickhouse.Options{ Addr: []string{url}, Auth: clickhouse.Auth{ - Database: database, - Username: userName, - Password: password, + Database: "default", }, MaxOpenConns: 20, MaxIdleConns: 15, @@ -99,7 +83,7 @@ func NewConnector(url string) Connector { } func (c *connectorImpl) newBatch(name, query string) error { - batch, err := NewBulk(c.conn, query) + batch, err := NewBulk(c.conn, name, query) if err != nil { return fmt.Errorf("can't create new batch: %s", err) } From 3d9ea580a29609f9e065794279915d215c857de3 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:45:00 +0100 Subject: [PATCH 036/218] fix(backend): removed wrong line from batchSet --- backend/pkg/db/postgres/batches.go | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index abdee36f2..8b9f2484d 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -186,7 +186,6 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { } br.Close() // returns err database.RecordBatchInsertDuration(float64(time.Now().Sub(start).Milliseconds())) - database.IncreaseTotalBatches() } } From b813b50e017a68e873e1a338d36dab9097dba4dc Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:56:00 +0100 Subject: [PATCH 037/218] fix(backend): upgrade /x/net library to avoid vulnerabilities --- backend/go.mod | 8 ++++---- backend/go.sum | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 0615fb0cb..e11b839fa 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -20,14 +20,14 @@ require ( github.com/klauspost/pgzip v1.2.5 github.com/oschwald/maxminddb-golang v1.7.0 github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.12.1 github.com/sethvargo/go-envconfig v0.7.0 github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe - go.opentelemetry.io/otel v1.7.0 go.opentelemetry.io/otel/exporters/prometheus v0.30.0 go.opentelemetry.io/otel/metric v0.30.0 go.opentelemetry.io/otel/sdk/metric v0.30.0 - golang.org/x/net v0.0.0-20220906165146-f3363e06e74c + golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 google.golang.org/api v0.81.0 ) @@ -55,19 +55,19 @@ require ( github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/paulmach/orb v0.7.1 // indirect github.com/pierrec/lz4/v4 v4.1.15 // indirect - github.com/prometheus/client_golang v1.12.1 // indirect github.com/prometheus/client_model v0.2.0 // indirect github.com/prometheus/common v0.32.1 // indirect github.com/prometheus/procfs v0.7.3 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/stretchr/testify v1.8.0 // indirect go.opencensus.io v0.23.0 // indirect + go.opentelemetry.io/otel v1.7.0 // indirect go.opentelemetry.io/otel/sdk v1.7.0 // 
indirect go.opentelemetry.io/otel/trace v1.7.0 // indirect golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect - golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect + golang.org/x/sys v0.1.0 // indirect golang.org/x/text v0.4.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect diff --git a/backend/go.sum b/backend/go.sum index 5aa3ae3de..7b33d881d 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -601,8 +601,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220906165146-f3363e06e74c h1:yKufUcDwucU5urd+50/Opbt4AYpqthk7wHpHok8f1lo= -golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 h1:KrLJ+iz8J6j6VVr/OCfULAcK+xozUmWE43fKpMR4MlI= +golang.org/x/net v0.1.1-0.20221104162952-702349b0e862/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -715,8 +715,8 @@ golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= From dab2707107c1402ec7c4a03e94dd6233d55f5b5d Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:59:57 +0100 Subject: [PATCH 038/218] feat(backend): clean up go modules --- backend/go.mod | 6 -- backend/go.sum | 13 --- backend/pkg/monitoring/metrics.go | 138 ------------------------------ 3 files changed, 157 deletions(-) delete mode 100644 backend/pkg/monitoring/metrics.go diff --git a/backend/go.mod b/backend/go.mod index e11b839fa..161513ed8 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -24,9 +24,6 @@ require ( github.com/sethvargo/go-envconfig v0.7.0 github.com/tomasen/realip 
v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe - go.opentelemetry.io/otel/exporters/prometheus v0.30.0 - go.opentelemetry.io/otel/metric v0.30.0 - go.opentelemetry.io/otel/sdk/metric v0.30.0 golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 google.golang.org/api v0.81.0 ) @@ -38,8 +35,6 @@ require ( cloud.google.com/go/storage v1.14.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/go-logr/logr v1.2.3 // indirect - github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/google/go-cmp v0.5.8 // indirect @@ -62,7 +57,6 @@ require ( github.com/stretchr/testify v1.8.0 // indirect go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/otel v1.7.0 // indirect - go.opentelemetry.io/otel/sdk v1.7.0 // indirect go.opentelemetry.io/otel/trace v1.7.0 // indirect golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect diff --git a/backend/go.sum b/backend/go.sum index 7b33d881d..de6d507d3 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -80,8 +80,6 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo= github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= -github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= @@ -156,9 +154,7 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= @@ -489,14 +485,6 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM= go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= -go.opentelemetry.io/otel/exporters/prometheus v0.30.0 h1:YXo5ZY5nofaEYMCMTTMaRH2cLDZB8+0UGuk5RwMfIo0= -go.opentelemetry.io/otel/exporters/prometheus v0.30.0/go.mod h1:qN5feW+0/d661KDtJuATEmHtw5bKBK7NSvNEP927zSs= -go.opentelemetry.io/otel/metric v0.30.0 
h1:Hs8eQZ8aQgs0U49diZoaS6Uaxw3+bBE3lcMUKBFIk3c= -go.opentelemetry.io/otel/metric v0.30.0/go.mod h1:/ShZ7+TS4dHzDFmfi1kSXMhMVubNoP0oIaBp70J6UXU= -go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0= -go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= -go.opentelemetry.io/otel/sdk/metric v0.30.0 h1:XTqQ4y3erR2Oj8xSAOL5ovO5011ch2ELg51z4fVkpME= -go.opentelemetry.io/otel/sdk/metric v0.30.0/go.mod h1:8AKFRi5HyvTR0RRty3paN1aMC9HMT+NzcEhw/BLkLX8= go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o= go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -690,7 +678,6 @@ golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go deleted file mode 100644 index 803fba127..000000000 --- a/backend/pkg/monitoring/metrics.go +++ /dev/null @@ -1,138 +0,0 @@ -package monitoring - -import ( - "fmt" - "log" - "net/http" - - "go.opentelemetry.io/otel/exporters/prometheus" - "go.opentelemetry.io/otel/metric" - "go.opentelemetry.io/otel/metric/global" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" - "go.opentelemetry.io/otel/sdk/metric/aggregator/histogram" - controller "go.opentelemetry.io/otel/sdk/metric/controller/basic" - "go.opentelemetry.io/otel/sdk/metric/export/aggregation" - processor "go.opentelemetry.io/otel/sdk/metric/processor/basic" - selector "go.opentelemetry.io/otel/sdk/metric/selector/simple" -) - -// Metrics stores all collected metrics -type Metrics struct { - meter metric.Meter - counters map[string]syncfloat64.Counter - upDownCounters map[string]syncfloat64.UpDownCounter - histograms map[string]syncfloat64.Histogram -} - -func New(name string) *Metrics { - m := &Metrics{ - counters: make(map[string]syncfloat64.Counter), - upDownCounters: make(map[string]syncfloat64.UpDownCounter), - histograms: make(map[string]syncfloat64.Histogram), - } - m.initPrometheusDataExporter() - m.initMetrics(name) - return m -} - -// initPrometheusDataExporter allows to use collected metrics in prometheus -func (m *Metrics) initPrometheusDataExporter() { - config := prometheus.Config{ - DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50, 100, 250, 500, 1000}, - } - c := controller.New( - processor.NewFactory( - selector.NewWithHistogramDistribution( - histogram.WithExplicitBoundaries(config.DefaultHistogramBoundaries), - ), - aggregation.CumulativeTemporalitySelector(), - processor.WithMemory(true), - ), - ) - exporter, err := prometheus.New(config, c) - if err != nil { - log.Panicf("failed to initialize prometheus 
exporter %v", err) - } - - global.SetMeterProvider(exporter.MeterProvider()) - - http.HandleFunc("/metrics", exporter.ServeHTTP) - go func() { - _ = http.ListenAndServe(":8888", nil) - }() - - fmt.Println("Prometheus server running on :8888") -} - -func (m *Metrics) initMetrics(name string) { - m.meter = global.Meter(name) -} - -/* -Counter is a synchronous instrument that measures additive non-decreasing values, for example, the number of: -- processed requests -- received bytes -- disk reads -*/ - -func (m *Metrics) RegisterCounter(name string) (syncfloat64.Counter, error) { - if counter, ok := m.counters[name]; ok { - return counter, nil - } - counter, err := m.meter.SyncFloat64().Counter(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize counter: %v", err) - } - m.counters[name] = counter - return counter, nil -} - -func (m *Metrics) GetCounter(name string) syncfloat64.Counter { - return m.counters[name] -} - -/* -UpDownCounter is a synchronous instrument which measures additive values that increase or decrease with time, -for example, the number of: -- active requests -- open connections -- memory in use (megabytes) -*/ - -func (m *Metrics) RegisterUpDownCounter(name string) (syncfloat64.UpDownCounter, error) { - if counter, ok := m.upDownCounters[name]; ok { - return counter, nil - } - counter, err := m.meter.SyncFloat64().UpDownCounter(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize upDownCounter: %v", err) - } - m.upDownCounters[name] = counter - return counter, nil -} - -func (m *Metrics) GetUpDownCounter(name string) syncfloat64.UpDownCounter { - return m.upDownCounters[name] -} - -/* -Histogram is a synchronous instrument that produces a histogram from recorded values, for example: -- request latency -- request size -*/ - -func (m *Metrics) RegisterHistogram(name string) (syncfloat64.Histogram, error) { - if hist, ok := m.histograms[name]; ok { - return hist, nil - } - hist, err := m.meter.SyncFloat64().Histogram(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize histogram: %v", err) - } - m.histograms[name] = hist - return hist, nil -} - -func (m *Metrics) GetHistogram(name string) syncfloat64.Histogram { - return m.histograms[name] -} From a6864a5c8eb24d7d79e6b6e8f5f927c6b19fa1ef Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 20 Feb 2023 17:24:09 +0100 Subject: [PATCH 039/218] fix(ui): change clickmap fetch filter --- frontend/app/services/MetricService.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/services/MetricService.ts b/frontend/app/services/MetricService.ts index d8c6e099a..5b97ec4ec 100644 --- a/frontend/app/services/MetricService.ts +++ b/frontend/app/services/MetricService.ts @@ -75,7 +75,7 @@ export default class MetricService { getMetricChartData(metric: Widget, data: any, isWidget: boolean = false): Promise { if ( metric.metricType === CLICKMAP - && document.location.pathname.split('/').pop() !== 'metrics' + && document.location.pathname.split('/').pop() === 'metrics' && (document.location.pathname.indexOf('dashboard') !== -1 && document.location.pathname.indexOf('metric') === -1) ) { return Promise.resolve({}) From 11388bccec76c828aa23ea9ac21ed3765eb3b691 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:02:14 +0100 Subject: [PATCH 040/218] fix(ui) - search url unhandled filter key --- .../Filters/FilterSource/FilterSource.tsx | 2 +- frontend/app/types/filter/newFilter.js | 1 + frontend/app/utils/search.ts | 17 ++++++++++++----- 3 files 
changed, 14 insertions(+), 6 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index 08c93d8df..7ae8d3a92 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -9,7 +9,7 @@ interface Props { } function FilterSource(props: Props) { const { filter } = props; - const [value, setValue] = useState(filter.source[0] || ''); + const [value, setValue] = useState(filter.source && filter.source[0] ? filter.source[0] : ''); useEffect(() => { setValue(filter.source[0] || ''); diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index 7a612c193..286f1cc13 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -195,6 +195,7 @@ export default Record({ _filter = filtersMap[type]; } } + return { ..._filter, ...filter, diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 017a5a7f6..d688ee369 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -58,9 +58,6 @@ const getFiltersFromEntries = (entires: any) => { let filter: any = {}; const filterKey = getFilterKeyTypeByKey(item.key); - if (!filterKey) { - return; - } const tmp = item.value.split('^'); const valueArr = tmp[0].split('|'); const operator = valueArr.shift(); @@ -78,10 +75,20 @@ const getFiltersFromEntries = (entires: any) => { } } + if (!filter) { + return + } + filter.value = valueArr; filter.operator = operator; - filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; - filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; + if (filter.icon === "filters/metadata") { + filter.source = filter.type; + filter.type = 'metadata'; + } else { + filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; + filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; + } + if (!filter.filters || filter.filters.size === 0) { filters.push(filter); } From de9b14c80dd0bd4145cafdbe287f6624df48108e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:26:40 +0100 Subject: [PATCH 041/218] fix(ui) - search url unhandled filter key --- .../app/components/shared/Filters/FilterSource/FilterSource.tsx | 2 +- frontend/app/utils/search.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index 7ae8d3a92..07ca61ec3 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -12,7 +12,7 @@ function FilterSource(props: Props) { const [value, setValue] = useState(filter.source && filter.source[0] ? filter.source[0] : ''); useEffect(() => { - setValue(filter.source[0] || ''); + setValue(filter.source && filter.source[0] ? 
filter.source[0] : ''); }, [filter]); const write = ({ target: { value, name } }: any) => setValue(value); diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index d688ee369..4b32f8d13 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -13,7 +13,7 @@ export const createUrlQuery = (filter: any) => { let str = `${f.operator}|${f.value.join('|')}`; if (f.hasSource) { - str = `${str}^${f.sourceOperator}|${f.source.join('|')}`; + str = `${str}^${f.sourceOperator ? f.sourceOperator : ''}|${f.source ? f.source.join('|') : ''}`; } let key: any = setQueryParamKeyFromFilterkey(f.key); From 2162fd0f4d98574e346cea31160a444758ca796c Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:27:45 +0100 Subject: [PATCH 042/218] fix(ui) - modal scroll issue --- frontend/app/components/ui/Modal/Modal.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/app/components/ui/Modal/Modal.tsx b/frontend/app/components/ui/Modal/Modal.tsx index 89ba9f5d9..c489aa216 100644 --- a/frontend/app/components/ui/Modal/Modal.tsx +++ b/frontend/app/components/ui/Modal/Modal.tsx @@ -13,7 +13,8 @@ function Modal(props: Props) { useEffect(() => { if (open) { document.body.style.overflow = 'hidden'; - } else { + } + return () => { document.body.style.overflow = 'auto'; } }, [open]); From 915399855a76d1e99efe93bdb7b223134aedf815 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:37:39 +0100 Subject: [PATCH 043/218] fix(ui) - card sessions pagination reset --- .../Dashboard/components/WidgetSessions/WidgetSessions.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index 4052e7a7e..f563d688e 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -94,7 +94,7 @@ function WidgetSessions(props: Props) { useEffect(() => { metricStore.updateKey('sessionsPage', 1); loadData(); - }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch]); + }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch, activeSeries]); useEffect(loadData, [metricStore.sessionsPage]); return ( From 12fb774bd72109bef6f09d5a654855f29f14b321 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:46:52 +0100 Subject: [PATCH 044/218] fix(ui) - filters z-index that causing depth issue --- .../Filters/FilterValueDropdown/FilterValueDropdown.module.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css index b0ca01016..6e34010b3 100644 --- a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css +++ b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css @@ -6,7 +6,7 @@ align-items: center; height: 26px; width: 100%; - z-index: 3; + /* z-index: 3; TODO this has to be fixed in clickmaps @Nikita */ & .right { height: 24px; From d7ec5a81b258c25d040ce3b9940dbcdb2a621ee2 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 10:33:11 +0100 Subject: [PATCH 045/218] fix(player): dont load 
devtools for clickmaps, fix scrolling overflow --- frontend/app/player/web/MessageManager.ts | 5 +++-- frontend/app/player/web/Screen/Screen.ts | 7 +++---- frontend/app/player/web/WebPlayer.ts | 2 +- frontend/app/player/web/addons/TargetMarker.ts | 2 -- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index d0ae18020..68ef0cbf8 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -193,9 +193,9 @@ export default class MessageManager { // this.state.update({ filesLoaded: true }) } - async loadMessages() { + async loadMessages(isClickmap: boolean = false) { this.setMessagesLoading(true) - // TODO: reuseable decryptor instance + // TODO: reusable decryptor instance const createNewParser = (shouldDecrypt = true) => { const decrypt = shouldDecrypt && this.session.fileKey ? (b: Uint8Array) => decryptSessionBytes(b, this.session.fileKey) @@ -233,6 +233,7 @@ export default class MessageManager { .finally(this.onFileReadFinally); // load devtools (TODO: start after the first DOM file download) + if (isClickmap) return; this.state.update({ devtoolsLoading: true }) loadFiles(this.session.devtoolsURL, createNewParser()) // EFS fallback diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts index b095385b1..cca56d402 100644 --- a/frontend/app/player/web/Screen/Screen.ts +++ b/frontend/app/player/web/Screen/Screen.ts @@ -213,11 +213,12 @@ export default class Screen { case ScaleMode.Embed: this.scaleRatio = Math.min(offsetWidth / width, offsetHeight / height) translate = "translate(-50%, -50%)" + posStyles = { height: height + 'px' } break; case ScaleMode.AdjustParentHeight: this.scaleRatio = offsetWidth / width translate = "translate(-50%, 0)" - posStyles = { top: 0 } + posStyles = { top: 0, height: this.document!.documentElement.getBoundingClientRect().height + 'px', } break; } @@ -232,13 +233,11 @@ export default class Screen { } Object.assign(this.screen.style, posStyles, { - height: height + 'px', width: width + 'px', transform: `scale(${this.scaleRatio}) ${translate}`, }) - Object.assign(this.iframe.style, { + Object.assign(this.iframe.style, posStyles, { width: width + 'px', - height: height + 'px', }) this.boundingRect = this.overlay.getBoundingClientRect(); diff --git a/frontend/app/player/web/WebPlayer.ts b/frontend/app/player/web/WebPlayer.ts index c4da835ff..d94d10beb 100644 --- a/frontend/app/player/web/WebPlayer.ts +++ b/frontend/app/player/web/WebPlayer.ts @@ -46,7 +46,7 @@ export default class WebPlayer extends Player { this.screen = screen this.messageManager = messageManager if (!live) { // hack. 
TODO: split OfflinePlayer class - messageManager.loadMessages() + void messageManager.loadMessages(isClickMap) } this.targetMarker = new TargetMarker(this.screen, wpState) diff --git a/frontend/app/player/web/addons/TargetMarker.ts b/frontend/app/player/web/addons/TargetMarker.ts index c9315f01b..6629ceaec 100644 --- a/frontend/app/player/web/addons/TargetMarker.ts +++ b/frontend/app/player/web/addons/TargetMarker.ts @@ -161,7 +161,6 @@ export default class TargetMarker { const scaleRatio = this.screen.getScale() Object.assign(overlay.style, clickmapStyles.overlayStyle({ height: iframeSize.height, width: iframeSize.width, scale: scaleRatio })) - console.log(selections) this.clickMapOverlay = overlay selections.forEach((s, i) => { const el = this.screen.getElementBySelector(s.selector); @@ -189,7 +188,6 @@ export default class TargetMarker { const border = document.createElement("div") - let key = 0 if (width > 50) { From ec382279b8228be24f93f4d2f6b3239dcb96937a Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 10:30:54 +0000 Subject: [PATCH 046/218] Updating parallel script --- scripts/helmcharts/build_deploy_parallel.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/build_deploy_parallel.sh b/scripts/helmcharts/build_deploy_parallel.sh index 38c1633bb..268811a34 100644 --- a/scripts/helmcharts/build_deploy_parallel.sh +++ b/scripts/helmcharts/build_deploy_parallel.sh @@ -8,6 +8,12 @@ set -e # Removing local alpine:latest image docker rmi alpine || true +# Signing image +# cosign sign --key awskms:///alias/openreplay-container-sign image_url:tag +export SIGN_IMAGE=1 +export PUSH_IMAGE=1 +export AWS_DEFAULT_REGION="eu-central-1" +export SIGN_KEY="awskms:///alias/openreplay-container-sign" echo $DOCKER_REPO [[ -z DOCKER_REPO ]] && { echo Set DOCKER_REPO="your docker registry" @@ -22,9 +28,9 @@ echo $DOCKER_REPO tmux split-window "cd ../../frontend && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" tmux select-layout tiled tmux split-window "cd ../../sourcemap-reader && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_alerts.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_crons.sh $@" + tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@ + && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_alerts.sh $@ + && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_crons.sh $@" tmux select-layout tiled } From 92674c3e6df95e1c25d50affd701c4e23201c739 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 11:32:54 +0100 Subject: [PATCH 047/218] chore(build): ignoring ee folder for sourcemap-reader build Signed-off-by: rjshrjndrn --- sourcemap-reader/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 859347fd4..fbe8762e2 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -34,7 +34,7 @@ function build_api(){ tag="" # Copy enterprise code [[ $1 == "ee" ]] && { - cp -rf ../ee/sourcemap-reader/* ./ + cp -rf ../ee/sourcemap-reader/* ./ || true # We share same codebase for ee/foss 
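+        # "|| true": don't fail the build when the ee overlay is absent in a FOSS checkout.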
envarg="default-ee" tag="ee-" } From 967134c280f15eb4a65781f9af8d15bb783e2e5a Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 20 Feb 2023 15:30:45 +0100 Subject: [PATCH 048/218] fix(ui): keep share message after sharing --- frontend/app/components/shared/SharePopup/SharePopup.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/shared/SharePopup/SharePopup.js b/frontend/app/components/shared/SharePopup/SharePopup.js index 984ce0060..07726d14a 100644 --- a/frontend/app/components/shared/SharePopup/SharePopup.js +++ b/frontend/app/components/shared/SharePopup/SharePopup.js @@ -80,8 +80,8 @@ export default class SharePopup extends React.PureComponent { handleSuccess = (endpoint) => { const obj = endpoint === 'Slack' - ? { isOpen: false, comment: '', loadingSlack: false } - : { isOpen: false, comment: '', loadingTeams: false }; + ? { loadingSlack: false } + : { loadingTeams: false }; this.setState(obj); toast.success(`Sent to ${endpoint}.`); }; @@ -109,7 +109,7 @@ export default class SharePopup extends React.PureComponent { return ( this.setState({ isOpen: true })} - onClose={() => this.setState({ isOpen: false })} + onClose={() => this.setState({ isOpen: false, comment: '' })} render={() => (
{this.state.loadingTeams || this.state.loadingSlack ? ( From 022cb65314a5cde160a6ee261d5cecf725c2da08 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 16:37:19 +0100 Subject: [PATCH 049/218] fix(ui) - cards list filter by dashboard --- .../components/Dashboard/components/MetricsList/MetricsList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx index f2639d37f..1b9f7dfc9 100644 --- a/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx +++ b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx @@ -21,7 +21,7 @@ function MetricsList({ const dashboard = dashboardStore.selectedDashboard; const existingCardIds = useMemo(() => dashboard?.widgets?.map(i => parseInt(i.metricId)), [dashboard]); - const cards = useMemo(() => metricStore.filteredCards.filter(i => !existingCardIds?.includes(parseInt(i.metricId))), [metricStore.filteredCards]); + const cards = useMemo(() => !!onSelectionChange ? metricStore.filteredCards.filter(i => !existingCardIds?.includes(parseInt(i.metricId))) : metricStore.filteredCards, [metricStore.filteredCards]); useEffect(() => { metricStore.fetchList(); From 7a7910c4fd9f2f2b9d2e68a9ba487246a073ab01 Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 20 Feb 2023 16:37:55 +0100 Subject: [PATCH 050/218] Draft: New metrics module (#982) * feat(backend): created new metrics module --- backend/cmd/assets/main.go | 19 +- backend/cmd/db/main.go | 13 +- backend/cmd/ender/main.go | 17 +- backend/cmd/http/main.go | 23 ++- backend/cmd/integrations/main.go | 18 +- backend/cmd/sink/main.go | 36 ++-- backend/cmd/storage/main.go | 15 +- backend/internal/assets/cacher/cacher.go | 47 ++--- backend/internal/http/router/handlers-ios.go | 37 ++-- backend/internal/http/router/handlers-web.go | 65 +++---- backend/internal/http/router/handlers.go | 6 +- backend/internal/http/router/response.go | 33 +++- backend/internal/http/router/router.go | 44 +---- backend/internal/sessionender/ender.go | 45 ++--- backend/internal/sink/assetscache/assets.go | 82 +++----- backend/internal/storage/storage.go | 143 ++++---------- backend/pkg/db/postgres/batches.go | 65 ++----- backend/pkg/db/postgres/bulk.go | 55 ++---- backend/pkg/db/postgres/bulks.go | 33 ++-- backend/pkg/db/postgres/connector.go | 49 +---- backend/pkg/db/postgres/pool.go | 73 +++----- backend/pkg/messages/iterator-sink.go | 3 + backend/pkg/messages/iterator.go | 5 +- backend/pkg/metrics/assets/metrics.go | 72 ++++++++ backend/pkg/metrics/common/metrics.go | 11 ++ backend/pkg/metrics/database/metrics.go | 127 +++++++++++++ backend/pkg/metrics/ender/metrics.go | 51 +++++ backend/pkg/metrics/http/metrics.go | 55 ++++++ backend/pkg/metrics/server.go | 40 ++++ backend/pkg/metrics/sink/metrics.go | 185 +++++++++++++++++++ backend/pkg/metrics/storage/metrics.go | 114 ++++++++++++ ee/backend/pkg/db/clickhouse/bulk.go | 16 +- ee/backend/pkg/db/clickhouse/connector.go | 26 +-- 33 files changed, 1021 insertions(+), 602 deletions(-) create mode 100644 backend/pkg/metrics/assets/metrics.go create mode 100644 backend/pkg/metrics/common/metrics.go create mode 100644 backend/pkg/metrics/database/metrics.go create mode 100644 backend/pkg/metrics/ender/metrics.go create mode 100644 backend/pkg/metrics/http/metrics.go create mode 100644 backend/pkg/metrics/server.go create mode 100644 backend/pkg/metrics/sink/metrics.go create mode 100644 
backend/pkg/metrics/storage/metrics.go diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index b41dedd87..b05ecbe52 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -1,9 +1,7 @@ package main import ( - "context" "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -13,12 +11,16 @@ import ( "openreplay/backend/internal/assets/cacher" config "openreplay/backend/internal/config/assets" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + assetsMetrics "openreplay/backend/pkg/metrics/assets" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("assets") + m := metrics.New() + m.Register(assetsMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -26,18 +28,13 @@ func main() { pprof.StartProfilingServer() } - cacher := cacher.NewCacher(cfg, metrics) - - totalAssets, err := metrics.RegisterCounter("assets_total") - if err != nil { - log.Printf("can't create assets_total metric: %s", err) - } + cacher := cacher.NewCacher(cfg) msgHandler := func(msg messages.Message) { switch m := msg.(type) { case *messages.AssetCache: cacher.CacheURL(m.SessionID(), m.URL) - totalAssets.Add(context.Background(), 1) + assetsMetrics.IncreaseProcessesSessions() // TODO: connect to "raw" topic in order to listen for JSException case *messages.JSException: sourceList, err := assets.ExtractJSExceptionSources(&m.Payload) diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index f9440a908..84b0d81ed 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -3,8 +3,6 @@ package main import ( "errors" "log" - types2 "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -14,16 +12,21 @@ import ( "openreplay/backend/internal/db/datasaver" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" + types2 "openreplay/backend/pkg/db/types" "openreplay/backend/pkg/handlers" custom2 "openreplay/backend/pkg/handlers/custom" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/sessions" ) func main() { - metrics := monitoring.New("db") + m := metrics.New() + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := db.New() @@ -33,7 +36,7 @@ func main() { // Init database pg := cache.NewPGCache( - postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit, metrics), cfg.ProjectExpirationTimeoutMs) + postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit), cfg.ProjectExpirationTimeoutMs) defer pg.Close() // HandlersFabric returns the list of message handlers we want to be applied to each incoming message. 
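The entrypoint hunks above and below all repeat the same three-step wiring. As a minimal sketch of that pattern (a hypothetical service main, using only the pkg/metrics API introduced in this patch):

package main

import (
	"log"

	"openreplay/backend/pkg/metrics"
	databaseMetrics "openreplay/backend/pkg/metrics/database"
)

func main() {
	// metrics.New() creates the Prometheus registry and serves /metrics on :8888
	// with the Go runtime and process collectors already attached.
	m := metrics.New()

	// Register only the collector sets this service actually updates;
	// passing a package's List() twice would make MustRegister panic.
	m.Register(databaseMetrics.List())

	log.Println("service started; scrape metrics from :8888/metrics")
}

Because the collectors are package-level vars updated through helpers such as database.RecordBatchInsertDuration, instrumented code paths never need a handle to the MetricServer itself.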
diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index 74b0b8bd2..da7ca9b89 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -2,8 +2,6 @@ package main import ( "log" - "openreplay/backend/internal/storage" - "openreplay/backend/pkg/pprof" "os" "os/signal" "strings" @@ -12,16 +10,23 @@ import ( "openreplay/backend/internal/config/ender" "openreplay/backend/internal/sessionender" + "openreplay/backend/internal/storage" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + enderMetrics "openreplay/backend/pkg/metrics/ender" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("ender") + m := metrics.New() + m.Register(enderMetrics.List()) + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := ender.New() @@ -29,10 +34,10 @@ func main() { pprof.StartProfilingServer() } - pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics), cfg.ProjectExpirationTimeoutMs) + pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), cfg.ProjectExpirationTimeoutMs) defer pg.Close() - sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber) + sessions, err := sessionender.New(intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber) if err != nil { log.Printf("can't init ender service: %s", err) return diff --git a/backend/cmd/http/main.go b/backend/cmd/http/main.go index 4fb82b635..83eedaf29 100644 --- a/backend/cmd/http/main.go +++ b/backend/cmd/http/main.go @@ -2,23 +2,28 @@ package main import ( "log" - "openreplay/backend/internal/config/http" - "openreplay/backend/internal/http/router" - "openreplay/backend/internal/http/server" - "openreplay/backend/internal/http/services" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" + "openreplay/backend/internal/config/http" + "openreplay/backend/internal/http/router" + "openreplay/backend/internal/http/server" + "openreplay/backend/internal/http/services" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + httpMetrics "openreplay/backend/pkg/metrics/http" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) func main() { - metrics := monitoring.New("http") + m := metrics.New() + m.Register(httpMetrics.List()) + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := http.New() @@ -31,14 +36,14 @@ func main() { defer producer.Close(15000) // Connect to database - dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics), 1000*60*20) + dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), 1000*60*20) defer dbConn.Close() // Build all services services := services.New(cfg, producer, dbConn) // Init server's routes - router, err := router.NewRouter(cfg, services, metrics) + router, err := router.NewRouter(cfg, services) if err != nil { log.Fatalf("failed while creating engine: %s", err) } diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 8c6d56966..3fa07ee9c 100644 --- a/backend/cmd/integrations/main.go +++ 
b/backend/cmd/integrations/main.go @@ -2,24 +2,26 @@ package main import ( "log" - config "openreplay/backend/internal/config/integrations" - "openreplay/backend/internal/integrations/clientManager" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/pprof" - "time" - "os" "os/signal" "syscall" + "time" + config "openreplay/backend/internal/config/integrations" + "openreplay/backend/internal/integrations/clientManager" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" + "openreplay/backend/pkg/metrics" + databaseMetrics "openreplay/backend/pkg/metrics/database" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/token" ) func main() { - metrics := monitoring.New("integrations") + m := metrics.New() + m.Register(databaseMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -27,7 +29,7 @@ func main() { pprof.StartProfilingServer() } - pg := postgres.NewConn(cfg.Postgres.String(), 0, 0, metrics) + pg := postgres.NewConn(cfg.Postgres.String(), 0, 0) defer pg.Close() tokenizer := token.NewTokenizer(cfg.TokenSecret) diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 74e0b1db1..4bbaeeee4 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -2,10 +2,8 @@ package main import ( "bytes" - "context" "encoding/binary" "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -16,13 +14,16 @@ import ( "openreplay/backend/internal/sink/sessionwriter" "openreplay/backend/internal/storage" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + sinkMetrics "openreplay/backend/pkg/metrics/sink" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" ) func main() { - metrics := monitoring.New("sink") + m := metrics.New() + m.Register(sinkMetrics.List()) log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := sink.New() @@ -39,22 +40,8 @@ func main() { producer := queue.NewProducer(cfg.MessageSizeLimit, true) defer producer.Close(cfg.ProducerCloseTimeout) rewriter := assets.NewRewriter(cfg.AssetsOrigin) - assetMessageHandler := assetscache.New(cfg, rewriter, producer, metrics) - + assetMessageHandler := assetscache.New(cfg, rewriter, producer) counter := storage.NewLogCounter() - // Session message metrics - totalMessages, err := metrics.RegisterCounter("messages_total") - if err != nil { - log.Printf("can't create messages_total metric: %s", err) - } - savedMessages, err := metrics.RegisterCounter("messages_saved") - if err != nil { - log.Printf("can't create messages_saved metric: %s", err) - } - messageSize, err := metrics.RegisterHistogram("messages_size") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } var ( sessionID uint64 @@ -74,11 +61,12 @@ func main() { if domBuffer.Len() <= 0 && devBuffer.Len() <= 0 { return } + sinkMetrics.RecordWrittenBytes(float64(domBuffer.Len()), "dom") + sinkMetrics.RecordWrittenBytes(float64(devBuffer.Len()), "devtools") // Write buffered batches to the session if err := writer.Write(sessionID, domBuffer.Bytes(), devBuffer.Bytes()); err != nil { log.Printf("writer error: %s", err) - return } // Prepare buffer for the next batch @@ -88,8 +76,7 @@ func main() { return } - // [METRICS] Increase the number of processed messages - totalMessages.Add(context.Background(), 1) + sinkMetrics.IncreaseTotalMessages() // Send SessionEnd trigger to storage service if msg.TypeID() == 
messages.MsgSessionEnd { @@ -187,9 +174,8 @@ func main() { } } - // [METRICS] Increase the number of written to the files messages and the message size - messageSize.Record(context.Background(), float64(len(msg.Encode()))) - savedMessages.Add(context.Background(), 1) + sinkMetrics.IncreaseWrittenMessages() + sinkMetrics.RecordMessageSize(float64(len(msg.Encode()))) } consumer := queue.NewConsumer( diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index dcb1b53ed..472324b95 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -2,7 +2,6 @@ package main import ( "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -12,13 +11,17 @@ import ( "openreplay/backend/internal/storage" "openreplay/backend/pkg/failover" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics" + storageMetrics "openreplay/backend/pkg/metrics/storage" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" - s3storage "openreplay/backend/pkg/storage" + cloud "openreplay/backend/pkg/storage" ) func main() { - metrics := monitoring.New("storage") + m := metrics.New() + m.Register(storageMetrics.List()) + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() @@ -26,8 +29,8 @@ func main() { pprof.StartProfilingServer() } - s3 := s3storage.NewS3(cfg.S3Region, cfg.S3Bucket) - srv, err := storage.New(cfg, s3, metrics) + s3 := cloud.NewS3(cfg.S3Region, cfg.S3Bucket) + srv, err := storage.New(cfg, s3) if err != nil { log.Printf("can't init storage service: %s", err) return diff --git a/backend/internal/assets/cacher/cacher.go b/backend/internal/assets/cacher/cacher.go index 8bbee092f..4b0353a9a 100644 --- a/backend/internal/assets/cacher/cacher.go +++ b/backend/internal/assets/cacher/cacher.go @@ -1,16 +1,13 @@ package cacher import ( - "context" "crypto/tls" "fmt" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "io" "io/ioutil" - "log" "mime" "net/http" - "openreplay/backend/pkg/monitoring" + metrics "openreplay/backend/pkg/metrics/assets" "path/filepath" "strings" "time" @@ -25,30 +22,22 @@ import ( const MAX_CACHE_DEPTH = 5 type cacher struct { - timeoutMap *timeoutMap // Concurrency implemented - s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." - httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." - rewriter *assets.Rewriter // Read only - Errors chan error - sizeLimit int - downloadedAssets syncfloat64.Counter - requestHeaders map[string]string - workers *WorkerPool + timeoutMap *timeoutMap // Concurrency implemented + s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." + httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." 
+ rewriter *assets.Rewriter // Read only + Errors chan error + sizeLimit int + requestHeaders map[string]string + workers *WorkerPool } func (c *cacher) CanCache() bool { return c.workers.CanAddTask() } -func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher { +func NewCacher(cfg *config.Config) *cacher { rewriter := assets.NewRewriter(cfg.AssetsOrigin) - if metrics == nil { - log.Fatalf("metrics are empty") - } - downloadedAssets, err := metrics.RegisterCounter("assets_downloaded") - if err != nil { - log.Printf("can't create downloaded_assets metric: %s", err) - } c := &cacher{ timeoutMap: newTimeoutMap(), s3: storage.NewS3(cfg.AWSRegion, cfg.S3BucketAssets), @@ -59,11 +48,10 @@ func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher { TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, }, }, - rewriter: rewriter, - Errors: make(chan error), - sizeLimit: cfg.AssetsSizeLimit, - downloadedAssets: downloadedAssets, - requestHeaders: cfg.AssetsRequestHeaders, + rewriter: rewriter, + Errors: make(chan error), + sizeLimit: cfg.AssetsSizeLimit, + requestHeaders: cfg.AssetsRequestHeaders, } c.workers = NewPool(64, c.CacheFile) return c @@ -75,6 +63,7 @@ func (c *cacher) CacheFile(task *Task) { func (c *cacher) cacheURL(t *Task) { t.retries-- + start := time.Now() req, _ := http.NewRequest("GET", t.requestURL, nil) if t.retries%2 == 0 { req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0") @@ -87,6 +76,7 @@ func (c *cacher) cacheURL(t *Task) { c.Errors <- errors.Wrap(err, t.urlContext) return } + metrics.RecordDownloadDuration(float64(time.Now().Sub(start).Milliseconds()), res.StatusCode) defer res.Body.Close() if res.StatusCode >= 400 { printErr := true @@ -122,12 +112,15 @@ func (c *cacher) cacheURL(t *Task) { } // TODO: implement in streams + start = time.Now() err = c.s3.Upload(strings.NewReader(strData), t.cachePath, contentType, false) if err != nil { + metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), true) c.Errors <- errors.Wrap(err, t.urlContext) return } - c.downloadedAssets.Add(context.Background(), 1) + metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), false) + metrics.IncreaseSavedSessions() if isCSS { if t.depth > 0 { diff --git a/backend/internal/http/router/handlers-ios.go b/backend/internal/http/router/handlers-ios.go index e0fc73b6f..b11918d54 100644 --- a/backend/internal/http/router/handlers-ios.go +++ b/backend/internal/http/router/handlers-ios.go @@ -22,28 +22,28 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) req := &StartIOSSessionRequest{} if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, 0) return } body := http.MaxBytesReader(w, r.Body, e.cfg.JsonSizeLimit) defer body.Close() if err := json.NewDecoder(body).Decode(req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, 0) return } if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"), startTime, r.URL.Path, 0) return } p, err := e.services.Database.GetProjectByKey(*req.ProjectKey) if err != nil { if postgres.IsNoRowsErr(err) { - ResponseWithError(w, http.StatusNotFound, 
errors.New("Project doesn't exist or is not active")) + ResponseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"), startTime, r.URL.Path, 0) } else { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) // TODO: send error here only on staging } return } @@ -53,18 +53,18 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) if err != nil { // Starting the new one dice := byte(rand.Intn(100)) // [0, 100) if dice >= p.SampleRate { - ResponseWithError(w, http.StatusForbidden, errors.New("cancel")) + ResponseWithError(w, http.StatusForbidden, errors.New("cancel"), startTime, r.URL.Path, 0) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, 0) return } sessionID, err := e.services.Flaker.Compose(uint64(startTime.UnixMilli())) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) return } // TODO: if EXPIRED => send message for two sessions association @@ -94,22 +94,24 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) UserUUID: userUUID, SessionID: strconv.FormatUint(tokenData.ID, 10), BeaconSizeLimit: e.cfg.BeaconSizeLimit, - }) + }, startTime, r.URL.Path, 0) } func (e *Router) pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } e.pushMessages(w, r, sessionData.ID, e.cfg.TopicRawIOS) } func (e *Router) pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil && err != token.EXPIRED { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } // Check timestamps here? @@ -117,16 +119,17 @@ func (e *Router) pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Reque } func (e *Router) imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() log.Printf("recieved imagerequest") sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { // Should accept expired token? 
- ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, 0) return } if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, 0) return } r.Body = http.MaxBytesReader(w, r.Body, e.cfg.FileSizeLimit) @@ -134,21 +137,21 @@ func (e *Router) imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) err = r.ParseMultipartForm(1e6) // ~1Mb if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { - ResponseWithError(w, http.StatusUnsupportedMediaType, err) + ResponseWithError(w, http.StatusUnsupportedMediaType, err, startTime, r.URL.Path, 0) return // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB } else if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, 0) // TODO: send error here only on staging return } if r.MultipartForm == nil { - ResponseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed")) + ResponseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed"), startTime, r.URL.Path, 0) return } if len(r.MultipartForm.Value["projectKey"]) == 0 { - ResponseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter? + ResponseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing"), startTime, r.URL.Path, 0) // status for missing/wrong parameter? return } diff --git a/backend/internal/http/router/handlers-web.go b/backend/internal/http/router/handlers-web.go index 7afd184e5..52a37b7f0 100644 --- a/backend/internal/http/router/handlers-web.go +++ b/backend/internal/http/router/handlers-web.go @@ -3,18 +3,17 @@ package router import ( "encoding/json" "errors" - "github.com/Masterminds/semver" - "go.opentelemetry.io/otel/attribute" "io" "log" "math/rand" "net/http" - "openreplay/backend/internal/http/uuid" - "openreplay/backend/pkg/flakeid" "strconv" "time" + "github.com/Masterminds/semver" + "openreplay/backend/internal/http/uuid" "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/flakeid" . 
"openreplay/backend/pkg/messages" "openreplay/backend/pkg/token" ) @@ -28,13 +27,6 @@ func (e *Router) readBody(w http.ResponseWriter, r *http.Request, limit int64) ( if err != nil { return nil, err } - - reqSize := len(bodyBytes) - e.requestSize.Record( - r.Context(), - float64(reqSize), - []attribute.KeyValue{attribute.String("method", r.URL.Path)}..., - ) return bodyBytes, nil } @@ -56,40 +48,43 @@ func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) (ts uint func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { startTime := time.Now() + bodySize := 0 // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Parse request body req := &StartSessionRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return } // Handler's logic if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"), startTime, r.URL.Path, bodySize) return } p, err := e.services.Database.GetProjectByKey(*req.ProjectKey) if err != nil { if postgres.IsNoRowsErr(err) { - ResponseWithError(w, http.StatusNotFound, errors.New("project doesn't exist or capture limit has been reached")) + ResponseWithError(w, http.StatusNotFound, + errors.New("project doesn't exist or capture limit has been reached"), startTime, r.URL.Path, bodySize) } else { log.Printf("can't get project by key: %s", err) - ResponseWithError(w, http.StatusInternalServerError, errors.New("can't get project by key")) + ResponseWithError(w, http.StatusInternalServerError, errors.New("can't get project by key"), startTime, r.URL.Path, bodySize) } return } @@ -99,19 +94,19 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) if err != nil || req.Reset { // Starting the new one dice := byte(rand.Intn(100)) // [0, 100) if dice >= p.SampleRate { - ResponseWithError(w, http.StatusForbidden, errors.New("cancel")) + ResponseWithError(w, http.StatusForbidden, errors.New("cancel"), startTime, r.URL.Path, bodySize) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, bodySize) return } startTimeMili := startTime.UnixMilli() sessionID, err := e.services.Flaker.Compose(uint64(startTimeMili)) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) + ResponseWithError(w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return } // TODO: if EXPIRED => send message for two sessions association @@ -163,29 +158,33 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) BeaconSizeLimit: e.getBeaconSize(tokenData.ID), StartTimestamp: 
int64(flakeid.ExtractTimestamp(tokenData.ID)), Delay: tokenData.Delay, - }) + }, startTime, r.URL.Path, bodySize) } func (e *Router) pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + // Check authorization sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r) if err != nil { - ResponseWithError(w, http.StatusUnauthorized, err) + ResponseWithError(w, http.StatusUnauthorized, err, startTime, r.URL.Path, bodySize) return } // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.getBeaconSize(sessionData.ID)) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Send processed messages to queue as array of bytes // TODO: check bytes for nonsense crap @@ -194,39 +193,43 @@ func (e *Router) pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) log.Printf("can't send processed messages to queue: %s", err) } - w.WriteHeader(http.StatusOK) + ResponseOK(w, startTime, r.URL.Path, bodySize) } func (e *Router) notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + // Check request body if r.Body == nil { - ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty")) + ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"), startTime, r.URL.Path, bodySize) return } bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit) if err != nil { log.Printf("error while reading request body: %s", err) - ResponseWithError(w, http.StatusRequestEntityTooLarge, err) + ResponseWithError(w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) return } + bodySize = len(bodyBytes) // Parse request body req := &NotStartedRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { - ResponseWithError(w, http.StatusBadRequest, err) + ResponseWithError(w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return } // Handler's logic if req.ProjectKey == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("projectKey value required")) + ResponseWithError(w, http.StatusForbidden, errors.New("projectKey value required"), startTime, r.URL.Path, bodySize) return } ua := e.services.UaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway if ua == nil { - ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) + ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"), startTime, r.URL.Path, bodySize) return } country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r) @@ -248,5 +251,5 @@ func (e *Router) notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { log.Printf("Unable to insert Unstarted Session: %v\n", err) } - w.WriteHeader(http.StatusOK) + ResponseOK(w, startTime, r.URL.Path, bodySize) } diff --git a/backend/internal/http/router/handlers.go b/backend/internal/http/router/handlers.go index c36fdd668..425177341 100644 --- a/backend/internal/http/router/handlers.go +++ b/backend/internal/http/router/handlers.go @@ -6,9 +6,11 @@ import ( "io/ioutil" "log" "net/http" + "time" ) func (e *Router) pushMessages(w 
http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) { + start := time.Now() body := http.MaxBytesReader(w, r.Body, e.cfg.BeaconSizeLimit) defer body.Close() @@ -21,7 +23,7 @@ func (e *Router) pushMessages(w http.ResponseWriter, r *http.Request, sessionID reader, err = gzip.NewReader(body) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent response + ResponseWithError(w, http.StatusInternalServerError, err, start, r.URL.Path, 0) // TODO: stage-dependent response return } //log.Println("Gzip reader init", reader) @@ -32,7 +34,7 @@ func (e *Router) pushMessages(w http.ResponseWriter, r *http.Request, sessionID //log.Println("Reader after switch:", reader) buf, err := ioutil.ReadAll(reader) if err != nil { - ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging + ResponseWithError(w, http.StatusInternalServerError, err, start, r.URL.Path, 0) // TODO: send error here only on staging return } e.services.Producer.Produce(topicName, sessionID, buf) // What if not able to send? diff --git a/backend/internal/http/router/response.go b/backend/internal/http/router/response.go index 0b4725419..b66b7c563 100644 --- a/backend/internal/http/router/response.go +++ b/backend/internal/http/router/response.go @@ -4,21 +4,44 @@ import ( "encoding/json" "log" "net/http" + "time" + + metrics "openreplay/backend/pkg/metrics/http" ) -func ResponseWithJSON(w http.ResponseWriter, res interface{}) { +func recordMetrics(requestStart time.Time, url string, code, bodySize int) { + if bodySize > 0 { + metrics.RecordRequestSize(float64(bodySize), url, code) + } + metrics.IncreaseTotalRequests() + metrics.RecordRequestDuration(float64(time.Now().Sub(requestStart).Milliseconds()), url, code) +} + +func ResponseOK(w http.ResponseWriter, requestStart time.Time, url string, bodySize int) { + w.WriteHeader(http.StatusOK) + recordMetrics(requestStart, url, http.StatusOK, bodySize) +} + +func ResponseWithJSON(w http.ResponseWriter, res interface{}, requestStart time.Time, url string, bodySize int) { body, err := json.Marshal(res) if err != nil { log.Println(err) } w.Header().Set("Content-Type", "application/json") w.Write(body) + recordMetrics(requestStart, url, http.StatusOK, bodySize) } -func ResponseWithError(w http.ResponseWriter, code int, err error) { - type response struct { - Error string `json:"error"` +type response struct { + Error string `json:"error"` +} + +func ResponseWithError(w http.ResponseWriter, code int, err error, requestStart time.Time, url string, bodySize int) { + body, err := json.Marshal(&response{err.Error()}) + if err != nil { + log.Println(err) } w.WriteHeader(code) - ResponseWithJSON(w, &response{err.Error()}) + w.Write(body) + recordMetrics(requestStart, url, code, bodySize) } diff --git a/backend/internal/http/router/router.go b/backend/internal/http/router/router.go index 964016dfd..6cd7efe79 100644 --- a/backend/internal/http/router/router.go +++ b/backend/internal/http/router/router.go @@ -1,19 +1,16 @@ package router import ( - "context" "fmt" - "github.com/gorilla/mux" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" "net/http" + "sync" + "time" + + "github.com/gorilla/mux" http3 "openreplay/backend/internal/config/http" http2 "openreplay/backend/internal/http/services" "openreplay/backend/internal/http/util" - "openreplay/backend/pkg/monitoring" - "sync" - "time" ) type BeaconSize struct { @@ -25,21 +22,16 @@ type Router 
struct { router *mux.Router cfg *http3.Config services *http2.ServicesBuilder - requestSize syncfloat64.Histogram - requestDuration syncfloat64.Histogram - totalRequests syncfloat64.Counter mutex *sync.RWMutex beaconSizeCache map[uint64]*BeaconSize // Cache for session's beaconSize } -func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder, metrics *monitoring.Metrics) (*Router, error) { +func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder) (*Router, error) { switch { case cfg == nil: return nil, fmt.Errorf("config is empty") case services == nil: return nil, fmt.Errorf("services is empty") - case metrics == nil: - return nil, fmt.Errorf("metrics is empty") } e := &Router{ cfg: cfg, @@ -47,7 +39,6 @@ func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder, metrics *moni mutex: &sync.RWMutex{}, beaconSizeCache: make(map[uint64]*BeaconSize), } - e.initMetrics(metrics) e.init() go e.clearBeaconSizes() return e, nil @@ -115,22 +106,6 @@ func (e *Router) init() { e.router.Use(e.corsMiddleware) } -func (e *Router) initMetrics(metrics *monitoring.Metrics) { - var err error - e.requestSize, err = metrics.RegisterHistogram("requests_body_size") - if err != nil { - log.Printf("can't create requests_body_size metric: %s", err) - } - e.requestDuration, err = metrics.RegisterHistogram("requests_duration") - if err != nil { - log.Printf("can't create requests_duration metric: %s", err) - } - e.totalRequests, err = metrics.RegisterCounter("requests_total") - if err != nil { - log.Printf("can't create requests_total metric: %s", err) - } -} - func (e *Router) root(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } @@ -149,17 +124,8 @@ func (e *Router) corsMiddleware(next http.Handler) http.Handler { log.Printf("Request: %v - %v ", r.Method, util.SafeString(r.URL.Path)) - requestStart := time.Now() - // Serve request next.ServeHTTP(w, r) - - metricsContext, _ := context.WithTimeout(context.Background(), time.Millisecond*100) - e.totalRequests.Add(metricsContext, 1) - e.requestDuration.Record(metricsContext, - float64(time.Now().Sub(requestStart).Milliseconds()), - []attribute.KeyValue{attribute.String("method", r.URL.Path)}..., - ) }) } diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index c1c2c9b7f..e1ddb0ffe 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -1,13 +1,11 @@ package sessionender import ( - "context" - "fmt" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" "time" + + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/metrics/ender" ) // EndedSessionHandler handler for ended sessions @@ -23,32 +21,16 @@ type session struct { // SessionEnder updates timestamp of last message for each session type SessionEnder struct { - timeout int64 - sessions map[uint64]*session // map[sessionID]session - timeCtrl *timeController - activeSessions syncfloat64.UpDownCounter - totalSessions syncfloat64.Counter + timeout int64 + sessions map[uint64]*session // map[sessionID]session + timeCtrl *timeController } -func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder, error) { - if metrics == nil { - return nil, fmt.Errorf("metrics module is empty") - } - activeSessions, err := metrics.RegisterUpDownCounter("sessions_active") - if err != nil { - return nil, fmt.Errorf("can't register session.active metric: %s", err) - } - totalSessions, err := 
metrics.RegisterCounter("sessions_total") - if err != nil { - return nil, fmt.Errorf("can't register session.total metric: %s", err) - } - +func New(timeout int64, parts int) (*SessionEnder, error) { return &SessionEnder{ - timeout: timeout, - sessions: make(map[uint64]*session), - timeCtrl: NewTimeController(parts), - activeSessions: activeSessions, - totalSessions: totalSessions, + timeout: timeout, + sessions: make(map[uint64]*session), + timeCtrl: NewTimeController(parts), }, nil } @@ -74,8 +56,8 @@ func (se *SessionEnder) UpdateSession(msg messages.Message) { lastUserTime: msgTimestamp, // last timestamp from user's machine isEnded: false, } - se.activeSessions.Add(context.Background(), 1) - se.totalSessions.Add(context.Background(), 1) + ender.IncreaseActiveSessions() + ender.IncreaseTotalSessions() return } // Keep the highest user's timestamp for correct session duration value @@ -100,7 +82,8 @@ func (se *SessionEnder) HandleEndedSessions(handler EndedSessionHandler) { sess.isEnded = true if handler(sessID, sess.lastUserTime) { delete(se.sessions, sessID) - se.activeSessions.Add(context.Background(), -1) + ender.DecreaseActiveSessions() + ender.IncreaseClosedSessions() removedSessions++ } else { log.Printf("sessID: %d, userTime: %d", sessID, sess.lastUserTime) diff --git a/backend/internal/sink/assetscache/assets.go b/backend/internal/sink/assetscache/assets.go index 4c63f6897..387ee5c92 100644 --- a/backend/internal/sink/assetscache/assets.go +++ b/backend/internal/sink/assetscache/assets.go @@ -1,20 +1,19 @@ package assetscache import ( - "context" "crypto/md5" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "io" "log" "net/url" - "openreplay/backend/internal/config/sink" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/queue/types" - "openreplay/backend/pkg/url/assets" + metrics "openreplay/backend/pkg/metrics/sink" "strings" "sync" "time" + + "openreplay/backend/internal/config/sink" + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/url/assets" ) type CachedAsset struct { @@ -23,52 +22,21 @@ type CachedAsset struct { } type AssetsCache struct { - mutex sync.RWMutex - cfg *sink.Config - rewriter *assets.Rewriter - producer types.Producer - cache map[string]*CachedAsset - blackList []string // use "example.com" to filter all domains or ".example.com" to filter only third-level domain - totalAssets syncfloat64.Counter - cachedAssets syncfloat64.Counter - skippedAssets syncfloat64.Counter - assetSize syncfloat64.Histogram - assetDuration syncfloat64.Histogram + mutex sync.RWMutex + cfg *sink.Config + rewriter *assets.Rewriter + producer types.Producer + cache map[string]*CachedAsset + blackList []string // use "example.com" to filter all domains or ".example.com" to filter only third-level domain } -func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, metrics *monitoring.Metrics) *AssetsCache { - // Assets metrics - totalAssets, err := metrics.RegisterCounter("assets_total") - if err != nil { - log.Printf("can't create assets_total metric: %s", err) - } - cachedAssets, err := metrics.RegisterCounter("assets_cached") - if err != nil { - log.Printf("can't create assets_cached metric: %s", err) - } - skippedAssets, err := metrics.RegisterCounter("assets_skipped") - if err != nil { - log.Printf("can't create assets_skipped metric: %s", err) - } - assetSize, err := metrics.RegisterHistogram("asset_size") - if err != nil { - log.Printf("can't create 
asset_size metric: %s", err) - } - assetDuration, err := metrics.RegisterHistogram("asset_duration") - if err != nil { - log.Printf("can't create asset_duration metric: %s", err) - } +func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer) *AssetsCache { assetsCache := &AssetsCache{ - cfg: cfg, - rewriter: rewriter, - producer: producer, - cache: make(map[string]*CachedAsset, 64), - blackList: make([]string, 0), - totalAssets: totalAssets, - cachedAssets: cachedAssets, - skippedAssets: skippedAssets, - assetSize: assetSize, - assetDuration: assetDuration, + cfg: cfg, + rewriter: rewriter, + producer: producer, + cache: make(map[string]*CachedAsset, 64), + blackList: make([]string, 0), } // Parse black list for cache layer if len(cfg.CacheBlackList) > 0 { @@ -84,7 +52,7 @@ func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, m } func (e *AssetsCache) cleaner() { - cleanTick := time.Tick(time.Minute * 30) + cleanTick := time.Tick(time.Minute * 3) for { select { case <-cleanTick: @@ -105,6 +73,7 @@ func (e *AssetsCache) clearCache() { if int64(now.Sub(cache.ts).Minutes()) > e.cfg.CacheExpiration { deleted++ delete(e.cache, id) + metrics.DecreaseCachedAssets() } } log.Printf("cache cleaner: deleted %d/%d assets", deleted, cacheSize) @@ -232,8 +201,7 @@ func parseHost(baseURL string) (string, error) { } func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) string { - ctx := context.Background() - e.totalAssets.Add(ctx, 1) + metrics.IncreaseTotalAssets() // Try to find asset in cache h := md5.New() // Cut first part of url (scheme + host) @@ -255,7 +223,7 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st e.mutex.RUnlock() if ok { if int64(time.Now().Sub(cachedAsset.ts).Minutes()) < e.cfg.CacheExpiration { - e.skippedAssets.Add(ctx, 1) + metrics.IncreaseSkippedAssets() return cachedAsset.msg } } @@ -267,8 +235,8 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st start := time.Now() res := e.getRewrittenCSS(sessionID, baseURL, css) duration := time.Now().Sub(start).Milliseconds() - e.assetSize.Record(ctx, float64(len(res))) - e.assetDuration.Record(ctx, float64(duration)) + metrics.RecordAssetSize(float64(len(res))) + metrics.RecordProcessAssetDuration(float64(duration)) // Save asset to cache if we spent more than threshold if duration > e.cfg.CacheThreshold { e.mutex.Lock() @@ -277,7 +245,7 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st ts: time.Now(), } e.mutex.Unlock() - e.cachedAssets.Add(ctx, 1) + metrics.IncreaseCachedAssets() } // Return rewritten asset return res diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index fbe9e2228..1e2507163 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -2,20 +2,20 @@ package storage import ( "bytes" - "context" "fmt" - gzip "github.com/klauspost/pgzip" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - config "openreplay/backend/internal/config/storage" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/monitoring" - "openreplay/backend/pkg/storage" "os" "strconv" "strings" "sync" "time" + + config "openreplay/backend/internal/config/storage" + "openreplay/backend/pkg/messages" + metrics "openreplay/backend/pkg/metrics/storage" + "openreplay/backend/pkg/storage" + + gzip "github.com/klauspost/pgzip" ) type FileType string @@ -25,6 +25,13 @@ const ( DEV FileType = 
"/devtools.mob" ) +func (t FileType) String() string { + if t == DOM { + return "dom" + } + return "devtools" +} + type Task struct { id string doms *bytes.Buffer @@ -36,92 +43,23 @@ type Storage struct { cfg *config.Config s3 *storage.S3 startBytes []byte - - totalSessions syncfloat64.Counter - sessionDOMSize syncfloat64.Histogram - sessionDEVSize syncfloat64.Histogram - readingDOMTime syncfloat64.Histogram - readingDEVTime syncfloat64.Histogram - sortingDOMTime syncfloat64.Histogram - sortingDEVTime syncfloat64.Histogram - archivingDOMTime syncfloat64.Histogram - archivingDEVTime syncfloat64.Histogram - uploadingDOMTime syncfloat64.Histogram - uploadingDEVTime syncfloat64.Histogram - - tasks chan *Task - ready chan struct{} + tasks chan *Task + ready chan struct{} } -func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Storage, error) { +func New(cfg *config.Config, s3 *storage.S3) (*Storage, error) { switch { case cfg == nil: return nil, fmt.Errorf("config is empty") case s3 == nil: return nil, fmt.Errorf("s3 storage is empty") } - // Create metrics - totalSessions, err := metrics.RegisterCounter("sessions_total") - if err != nil { - log.Printf("can't create sessions_total metric: %s", err) - } - sessionDOMSize, err := metrics.RegisterHistogram("sessions_size") - if err != nil { - log.Printf("can't create session_size metric: %s", err) - } - sessionDevtoolsSize, err := metrics.RegisterHistogram("sessions_dt_size") - if err != nil { - log.Printf("can't create sessions_dt_size metric: %s", err) - } - readingDOMTime, err := metrics.RegisterHistogram("reading_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - readingDEVTime, err := metrics.RegisterHistogram("reading_dt_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - sortingDOMTime, err := metrics.RegisterHistogram("sorting_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - sortingDEVTime, err := metrics.RegisterHistogram("sorting_dt_duration") - if err != nil { - log.Printf("can't create reading_duration metric: %s", err) - } - archivingDOMTime, err := metrics.RegisterHistogram("archiving_duration") - if err != nil { - log.Printf("can't create archiving_duration metric: %s", err) - } - archivingDEVTime, err := metrics.RegisterHistogram("archiving_dt_duration") - if err != nil { - log.Printf("can't create archiving_duration metric: %s", err) - } - uploadingDOMTime, err := metrics.RegisterHistogram("uploading_duration") - if err != nil { - log.Printf("can't create uploading_duration metric: %s", err) - } - uploadingDEVTime, err := metrics.RegisterHistogram("uploading_dt_duration") - if err != nil { - log.Printf("can't create uploading_duration metric: %s", err) - } newStorage := &Storage{ - cfg: cfg, - s3: s3, - startBytes: make([]byte, cfg.FileSplitSize), - totalSessions: totalSessions, - sessionDOMSize: sessionDOMSize, - sessionDEVSize: sessionDevtoolsSize, - readingDOMTime: readingDOMTime, - readingDEVTime: readingDEVTime, - sortingDOMTime: sortingDOMTime, - sortingDEVTime: sortingDEVTime, - archivingDOMTime: archivingDOMTime, - archivingDEVTime: archivingDEVTime, - uploadingDOMTime: uploadingDOMTime, - uploadingDEVTime: uploadingDEVTime, - tasks: make(chan *Task, 1), - ready: make(chan struct{}), + cfg: cfg, + s3: s3, + startBytes: make([]byte, cfg.FileSplitSize), + tasks: make(chan *Task, 1), + ready: make(chan struct{}), } go newStorage.worker() return newStorage, nil @@ 
-187,11 +125,7 @@ func (s *Storage) openSession(filePath string, tp FileType) ([]byte, error) { if err != nil { return nil, fmt.Errorf("can't sort session, err: %s", err) } - if tp == DOM { - s.sortingDOMTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } else { - s.sortingDEVTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } + metrics.RecordSessionSortDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) return res, nil } @@ -215,26 +149,19 @@ func (s *Storage) prepareSession(path string, tp FileType, task *Task) error { if err != nil { return err } - durRead := time.Now().Sub(startRead).Milliseconds() - // Send metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - if tp == DOM { - s.sessionDOMSize.Record(ctx, float64(len(mob))) - s.readingDOMTime.Record(ctx, float64(durRead)) - } else { - s.sessionDEVSize.Record(ctx, float64(len(mob))) - s.readingDEVTime.Record(ctx, float64(durRead)) - } + metrics.RecordSessionSize(float64(len(mob)), tp.String()) + metrics.RecordSessionReadDuration(float64(time.Now().Sub(startRead).Milliseconds()), tp.String()) + // Encode and compress session if tp == DEV { - startCompress := time.Now() + start := time.Now() task.dev = s.compressSession(mob) - s.archivingDEVTime.Record(ctx, float64(time.Now().Sub(startCompress).Milliseconds())) + metrics.RecordSessionCompressDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) } else { if len(mob) <= s.cfg.FileSplitSize { - startCompress := time.Now() + start := time.Now() task.doms = s.compressSession(mob) - s.archivingDOMTime.Record(ctx, float64(time.Now().Sub(startCompress).Milliseconds())) + metrics.RecordSessionCompressDuration(float64(time.Now().Sub(start).Milliseconds()), tp.String()) return nil } wg := &sync.WaitGroup{} @@ -253,7 +180,7 @@ func (s *Storage) prepareSession(path string, tp FileType, task *Task) error { wg.Done() }() wg.Wait() - s.archivingDOMTime.Record(ctx, float64(firstPart+secondPart)) + metrics.RecordSessionCompressDuration(float64(firstPart+secondPart), tp.String()) } return nil } @@ -324,11 +251,9 @@ func (s *Storage) uploadSession(task *Task) { wg.Done() }() wg.Wait() - // Record metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - s.uploadingDOMTime.Record(ctx, float64(uploadDoms+uploadDome)) - s.uploadingDEVTime.Record(ctx, float64(uploadDev)) - s.totalSessions.Add(ctx, 1) + metrics.RecordSessionUploadDuration(float64(uploadDoms+uploadDome), DOM.String()) + metrics.RecordSessionUploadDuration(float64(uploadDev), DEV.String()) + metrics.IncreaseStorageTotalSessions() } func (s *Storage) worker() { diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index c1283da10..abdee36f2 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -1,14 +1,13 @@ package postgres import ( - "context" - "github.com/jackc/pgx/v4" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - "openreplay/backend/pkg/monitoring" "strings" "time" + + "openreplay/backend/pkg/metrics/database" + + "github.com/jackc/pgx/v4" ) type batchItem struct { @@ -78,21 +77,17 @@ func NewBatchesTask(size int) *batchesTask { } type BatchSet struct { - c Pool - batches map[uint64]*SessionBatch - batchQueueLimit int - batchSizeLimit int - batchSizeBytes syncfloat64.Histogram - batchSizeLines syncfloat64.Histogram - sqlRequestTime syncfloat64.Histogram - 
sqlRequestCounter syncfloat64.Counter - updates map[uint64]*sessionUpdates - workerTask chan *batchesTask - done chan struct{} - finished chan struct{} + c Pool + batches map[uint64]*SessionBatch + batchQueueLimit int + batchSizeLimit int + updates map[uint64]*sessionUpdates + workerTask chan *batchesTask + done chan struct{} + finished chan struct{} } -func NewBatchSet(c Pool, queueLimit, sizeLimit int, metrics *monitoring.Metrics) *BatchSet { +func NewBatchSet(c Pool, queueLimit, sizeLimit int) *BatchSet { bs := &BatchSet{ c: c, batches: make(map[uint64]*SessionBatch), @@ -103,31 +98,10 @@ func NewBatchSet(c Pool, queueLimit, sizeLimit int, metrics *monitoring.Metrics) finished: make(chan struct{}), updates: make(map[uint64]*sessionUpdates), } - bs.initMetrics(metrics) go bs.worker() return bs } -func (conn *BatchSet) initMetrics(metrics *monitoring.Metrics) { - var err error - conn.batchSizeBytes, err = metrics.RegisterHistogram("batch_size_bytes") - if err != nil { - log.Printf("can't create batchSizeBytes metric: %s", err) - } - conn.batchSizeLines, err = metrics.RegisterHistogram("batch_size_lines") - if err != nil { - log.Printf("can't create batchSizeLines metric: %s", err) - } - conn.sqlRequestTime, err = metrics.RegisterHistogram("sql_request_time") - if err != nil { - log.Printf("can't create sqlRequestTime metric: %s", err) - } - conn.sqlRequestCounter, err = metrics.RegisterCounter("sql_request_number") - if err != nil { - log.Printf("can't create sqlRequestNumber metric: %s", err) - } -} - func (conn *BatchSet) getBatch(sessionID uint64) *SessionBatch { sessionID = sessionID % 10 if _, ok := conn.batches[sessionID]; !ok { @@ -194,11 +168,10 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { // Append session update sql request to the end of batch batch.Prepare() // Record batch size in bytes and number of lines - conn.batchSizeBytes.Record(context.Background(), float64(batch.Size())) - conn.batchSizeLines.Record(context.Background(), float64(batch.Len())) + database.RecordBatchSize(float64(batch.Size())) + database.RecordBatchElements(float64(batch.Len())) start := time.Now() - isFailed := false // Send batch to db and execute br := conn.c.SendBatch(batch.batch) @@ -209,15 +182,11 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { failedSql := batch.items[i] query := strings.ReplaceAll(failedSql.query, "\n", " ") log.Println("failed sql req:", query, failedSql.arguments) - isFailed = true } } br.Close() // returns err - dur := time.Now().Sub(start).Milliseconds() - conn.sqlRequestTime.Record(context.Background(), float64(dur), - attribute.String("method", "batch"), attribute.Bool("failed", isFailed)) - conn.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "batch"), attribute.Bool("failed", isFailed)) + database.RecordBatchInsertDuration(float64(time.Now().Sub(start).Milliseconds())) + database.IncreaseTotalBatches() } } diff --git a/backend/pkg/db/postgres/bulk.go b/backend/pkg/db/postgres/bulk.go index 8c6c42f78..b6a2ddd35 100644 --- a/backend/pkg/db/postgres/bulk.go +++ b/backend/pkg/db/postgres/bulk.go @@ -2,13 +2,9 @@ package postgres import ( "bytes" - "context" "errors" "fmt" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" - "log" - "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/metrics/database" "time" ) @@ -25,15 +21,13 @@ type Bulk interface { } type bulkImpl struct { - conn Pool - table string - columns string - template string - setSize int - sizeLimit int - values 
[]interface{} - bulkSize syncfloat64.Histogram - bulkDuration syncfloat64.Histogram + conn Pool + table string + columns string + template string + setSize int + sizeLimit int + values []interface{} } func (b *bulkImpl) Append(args ...interface{}) error { @@ -79,18 +73,15 @@ func (b *bulkImpl) send() error { return fmt.Errorf("send bulk err: %s", err) } // Save bulk metrics - ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - b.bulkDuration.Record(ctx, float64(time.Now().Sub(start).Milliseconds()), attribute.String("table", b.table)) - b.bulkSize.Record(ctx, float64(size), attribute.String("table", b.table)) + database.RecordBulkElements(float64(size), "pg", b.table) + database.RecordBulkInsertDuration(float64(time.Now().Sub(start).Milliseconds()), "pg", b.table) return nil } -func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { +func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { switch { case conn == nil: return nil, errors.New("db conn is empty") - case metrics == nil: - return nil, errors.New("metrics is empty") case table == "": return nil, errors.New("table is empty") case columns == "": @@ -102,23 +93,13 @@ func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template st case sizeLimit <= 0: return nil, errors.New("size limit is wrong") } - messagesInBulk, err := metrics.RegisterHistogram("messages_in_bulk") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } - bulkInsertDuration, err := metrics.RegisterHistogram("bulk_insert_duration") - if err != nil { - log.Printf("can't create messages_size metric: %s", err) - } return &bulkImpl{ - conn: conn, - table: table, - columns: columns, - template: template, - setSize: setSize, - sizeLimit: sizeLimit, - values: make([]interface{}, 0, setSize*sizeLimit), - bulkSize: messagesInBulk, - bulkDuration: bulkInsertDuration, + conn: conn, + table: table, + columns: columns, + template: template, + setSize: setSize, + sizeLimit: sizeLimit, + values: make([]interface{}, 0, setSize*sizeLimit), }, nil } diff --git a/backend/pkg/db/postgres/bulks.go b/backend/pkg/db/postgres/bulks.go index 5774ba184..f3e9e95c9 100644 --- a/backend/pkg/db/postgres/bulks.go +++ b/backend/pkg/db/postgres/bulks.go @@ -2,7 +2,6 @@ package postgres import ( "log" - "openreplay/backend/pkg/monitoring" "time" ) @@ -30,16 +29,14 @@ type BulkSet struct { webCustomEvents Bulk webClickEvents Bulk webNetworkRequest Bulk - metrics *monitoring.Metrics workerTask chan *bulksTask done chan struct{} finished chan struct{} } -func NewBulkSet(c Pool, metrics *monitoring.Metrics) *BulkSet { +func NewBulkSet(c Pool) *BulkSet { bs := &BulkSet{ c: c, - metrics: metrics, workerTask: make(chan *bulksTask, 1), done: make(chan struct{}), finished: make(chan struct{}), @@ -86,7 +83,7 @@ func (conn *BulkSet) Get(name string) Bulk { func (conn *BulkSet) initBulks() { var err error - conn.autocompletes, err = NewBulk(conn.c, conn.metrics, + conn.autocompletes, err = NewBulk(conn.c, "autocomplete", "(value, type, project_id)", "($%d, $%d, $%d)", @@ -94,7 +91,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create autocomplete bulk: %s", err) } - conn.requests, err = NewBulk(conn.c, conn.metrics, + conn.requests, err = NewBulk(conn.c, "events_common.requests", "(session_id, timestamp, seq_index, url, duration, success)", "($%d, $%d, $%d, LEFT($%d, 8000), $%d, $%d)", @@ -102,7 +99,7 @@ func 
(conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create requests bulk: %s", err) } - conn.customEvents, err = NewBulk(conn.c, conn.metrics, + conn.customEvents, err = NewBulk(conn.c, "events_common.customs", "(session_id, timestamp, seq_index, name, payload)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d)", @@ -110,7 +107,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create customEvents bulk: %s", err) } - conn.webPageEvents, err = NewBulk(conn.c, conn.metrics, + conn.webPageEvents, err = NewBulk(conn.c, "events.pages", "(session_id, message_id, timestamp, referrer, base_referrer, host, path, query, dom_content_loaded_time, "+ "load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, "+ @@ -122,7 +119,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webInputEvents, err = NewBulk(conn.c, conn.metrics, + conn.webInputEvents, err = NewBulk(conn.c, "events.inputs", "(session_id, message_id, timestamp, value, label)", "($%d, $%d, $%d, LEFT($%d, 2000), NULLIF(LEFT($%d, 2000),''))", @@ -130,7 +127,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webGraphQL, err = NewBulk(conn.c, conn.metrics, + conn.webGraphQL, err = NewBulk(conn.c, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)", @@ -138,7 +135,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webErrors, err = NewBulk(conn.c, conn.metrics, + conn.webErrors, err = NewBulk(conn.c, "errors", "(error_id, project_id, source, name, message, payload)", "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)", @@ -146,7 +143,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrors bulk: %s", err) } - conn.webErrorEvents, err = NewBulk(conn.c, conn.metrics, + conn.webErrorEvents, err = NewBulk(conn.c, "events.errors", "(session_id, message_id, timestamp, error_id)", "($%d, $%d, $%d, $%d)", @@ -154,7 +151,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webErrorTags, err = NewBulk(conn.c, conn.metrics, + conn.webErrorTags, err = NewBulk(conn.c, "public.errors_tags", "(session_id, message_id, error_id, key, value)", "($%d, $%d, $%d, $%d, $%d)", @@ -162,7 +159,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webIssues, err = NewBulk(conn.c, conn.metrics, + conn.webIssues, err = NewBulk(conn.c, "issues", "(project_id, issue_id, type, context_string)", "($%d, $%d, $%d, $%d)", @@ -170,7 +167,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webIssues bulk: %s", err) } - conn.webIssueEvents, err = NewBulk(conn.c, conn.metrics, + conn.webIssueEvents, err = NewBulk(conn.c, "events_common.issues", "(session_id, issue_id, timestamp, seq_index, payload)", "($%d, $%d, $%d, $%d, CAST($%d AS jsonb))", @@ -178,7 +175,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webIssueEvents bulk: %s", err) } - conn.webCustomEvents, err = NewBulk(conn.c, conn.metrics, + conn.webCustomEvents, err = NewBulk(conn.c, "events_common.customs", "(session_id, seq_index, timestamp, name, payload, level)", "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)", @@ -186,7 +183,7 @@ func (conn *BulkSet) 
initBulks() { if err != nil { log.Fatalf("can't create webCustomEvents bulk: %s", err) } - conn.webClickEvents, err = NewBulk(conn.c, conn.metrics, + conn.webClickEvents, err = NewBulk(conn.c, "events.clicks", "(session_id, message_id, timestamp, label, selector, url, path)", "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000))", @@ -194,7 +191,7 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webClickEvents bulk: %s", err) } - conn.webNetworkRequest, err = NewBulk(conn.c, conn.metrics, + conn.webNetworkRequest, err = NewBulk(conn.c, "events_common.requests", "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success)", "($%d, $%d, $%d, LEFT($%d, 8000), LEFT($%d, 300), LEFT($%d, 2000), LEFT($%d, 8000), $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d)", diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 2e8f3d425..6904dc135 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -2,11 +2,10 @@ package postgres import ( "context" - "github.com/jackc/pgx/v4/pgxpool" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" + + "github.com/jackc/pgx/v4/pgxpool" "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/monitoring" ) type CH interface { @@ -15,36 +14,28 @@ type CH interface { // Conn contains batches, bulks and cache for all sessions type Conn struct { - c Pool - batches *BatchSet - bulks *BulkSet - batchSizeBytes syncfloat64.Histogram - batchSizeLines syncfloat64.Histogram - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter - chConn CH + c Pool + batches *BatchSet + bulks *BulkSet + chConn CH } func (conn *Conn) SetClickHouse(ch CH) { conn.chConn = ch } -func NewConn(url string, queueLimit, sizeLimit int, metrics *monitoring.Metrics) *Conn { - if metrics == nil { - log.Fatalf("metrics is nil") - } +func NewConn(url string, queueLimit, sizeLimit int) *Conn { c, err := pgxpool.Connect(context.Background(), url) if err != nil { log.Fatalf("pgxpool.Connect err: %s", err) } conn := &Conn{} - conn.initMetrics(metrics) - conn.c, err = NewPool(c, conn.sqlRequestTime, conn.sqlRequestCounter) + conn.c, err = NewPool(c) if err != nil { log.Fatalf("can't create new pool wrapper: %s", err) } - conn.bulks = NewBulkSet(conn.c, metrics) - conn.batches = NewBatchSet(conn.c, queueLimit, sizeLimit, metrics) + conn.bulks = NewBulkSet(conn.c) + conn.batches = NewBatchSet(conn.c, queueLimit, sizeLimit) return conn } @@ -55,26 +46,6 @@ func (conn *Conn) Close() error { return nil } -func (conn *Conn) initMetrics(metrics *monitoring.Metrics) { - var err error - conn.batchSizeBytes, err = metrics.RegisterHistogram("batch_size_bytes") - if err != nil { - log.Printf("can't create batchSizeBytes metric: %s", err) - } - conn.batchSizeLines, err = metrics.RegisterHistogram("batch_size_lines") - if err != nil { - log.Printf("can't create batchSizeLines metric: %s", err) - } - conn.sqlRequestTime, err = metrics.RegisterHistogram("sql_request_time") - if err != nil { - log.Printf("can't create sqlRequestTime metric: %s", err) - } - conn.sqlRequestCounter, err = metrics.RegisterCounter("sql_request_number") - if err != nil { - log.Printf("can't create sqlRequestNumber metric: %s", err) - } -} - func (conn *Conn) insertAutocompleteValue(sessionID uint64, projectID uint32, tp string, value string) { if len(value) == 0 { return diff --git 
a/backend/pkg/db/postgres/pool.go b/backend/pkg/db/postgres/pool.go index 5f9cbaa29..5214be8d0 100644 --- a/backend/pkg/db/postgres/pool.go +++ b/backend/pkg/db/postgres/pool.go @@ -3,12 +3,12 @@ package postgres import ( "context" "errors" - "github.com/jackc/pgx/v4" - "github.com/jackc/pgx/v4/pgxpool" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "strings" "time" + + "github.com/jackc/pgx/v4" + "github.com/jackc/pgx/v4/pgxpool" + "openreplay/backend/pkg/metrics/database" ) // Pool is a pgx.Pool wrapper with metrics integration @@ -22,19 +22,15 @@ type Pool interface { } type poolImpl struct { - conn *pgxpool.Pool - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter + conn *pgxpool.Pool } func (p *poolImpl) Query(sql string, args ...interface{}) (pgx.Rows, error) { start := time.Now() res, err := p.conn.Query(getTimeoutContext(), sql, args...) method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return res, err } @@ -42,10 +38,8 @@ func (p *poolImpl) QueryRow(sql string, args ...interface{}) pgx.Row { start := time.Now() res := p.conn.QueryRow(getTimeoutContext(), sql, args...) method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return res } @@ -53,45 +47,37 @@ func (p *poolImpl) Exec(sql string, arguments ...interface{}) error { start := time.Now() _, err := p.conn.Exec(getTimeoutContext(), sql, arguments...) 
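// Same instrumentation pattern as Query and QueryRow above: time the underlying pgx call, then label the shared duration histogram and request counter with the SQL verb and target table parsed out by methodName.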
method, table := methodName(sql) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return err } func (p *poolImpl) SendBatch(b *pgx.Batch) pgx.BatchResults { start := time.Now() res := p.conn.SendBatch(getTimeoutContext(), b) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "sendBatch")) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "sendBatch")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "sendBatch", "") + database.IncreaseTotalRequests("sendBatch", "") return res } func (p *poolImpl) Begin() (*_Tx, error) { start := time.Now() tx, err := p.conn.Begin(context.Background()) - p.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "begin")) - p.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "begin")) - return &_Tx{tx, p.sqlRequestTime, p.sqlRequestCounter}, err + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "") + database.IncreaseTotalRequests("begin", "") + return &_Tx{tx}, err } func (p *poolImpl) Close() { p.conn.Close() } -func NewPool(conn *pgxpool.Pool, sqlRequestTime syncfloat64.Histogram, sqlRequestCounter syncfloat64.Counter) (Pool, error) { +func NewPool(conn *pgxpool.Pool) (Pool, error) { if conn == nil { return nil, errors.New("conn is empty") } return &poolImpl{ - conn: conn, - sqlRequestTime: sqlRequestTime, - sqlRequestCounter: sqlRequestCounter, + conn: conn, }, nil } @@ -99,38 +85,30 @@ func NewPool(conn *pgxpool.Pool, sqlRequestTime syncfloat64.Histogram, sqlReques type _Tx struct { pgx.Tx - sqlRequestTime syncfloat64.Histogram - sqlRequestCounter syncfloat64.Counter } func (tx *_Tx) exec(sql string, args ...interface{}) error { start := time.Now() _, err := tx.Exec(context.Background(), sql, args...) 
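// Transaction statements are measured with the same database metrics as pool queries; begin, commit and rollback below are recorded under fixed method labels with an empty table value.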
method, table := methodName(sql) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", method), attribute.String("table", table)) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", method), attribute.String("table", table)) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) + database.IncreaseTotalRequests(method, table) return err } func (tx *_Tx) rollback() error { start := time.Now() err := tx.Rollback(context.Background()) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "rollback")) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "rollback")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "") + database.IncreaseTotalRequests("rollback", "") return err } func (tx *_Tx) commit() error { start := time.Now() err := tx.Commit(context.Background()) - tx.sqlRequestTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()), - attribute.String("method", "commit")) - tx.sqlRequestCounter.Add(context.Background(), 1, - attribute.String("method", "commit")) + database.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "") + database.IncreaseTotalRequests("commit", "") return err } @@ -169,7 +147,8 @@ func methodName(sql string) (string, string) { case "update": table = strings.TrimSpace(parts[1]) case "insert": - table = strings.TrimSpace(parts[2]) + tableNameParts := strings.Split(strings.TrimSpace(parts[2]), "(") + table = tableNameParts[0] } return cmd, table } diff --git a/backend/pkg/messages/iterator-sink.go b/backend/pkg/messages/iterator-sink.go index a5897c3b7..be12b63eb 100644 --- a/backend/pkg/messages/iterator-sink.go +++ b/backend/pkg/messages/iterator-sink.go @@ -3,6 +3,7 @@ package messages import ( "fmt" "log" + "openreplay/backend/pkg/metrics/sink" ) type sinkMessageIteratorImpl struct { @@ -53,6 +54,8 @@ func (i *sinkMessageIteratorImpl) sendBatchEnd() { } func (i *sinkMessageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { + sink.RecordBatchSize(float64(len(batchData))) + sink.IncreaseTotalBatches() // Create new message reader reader := NewMessageReader(batchData) diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go index a6717257e..f7b014d30 100644 --- a/backend/pkg/messages/iterator.go +++ b/backend/pkg/messages/iterator.go @@ -74,12 +74,13 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { i.messageInfo.Index++ msg := reader.Message() + msgType := msg.TypeID() // Preprocess "system" messages if _, ok := i.preFilter[msg.TypeID()]; ok { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info()) return } msg = transformDeprecated(msg) @@ -99,7 +100,7 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { if i.autoDecode { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info()) return } } diff --git a/backend/pkg/metrics/assets/metrics.go b/backend/pkg/metrics/assets/metrics.go new file mode 100644 index 000000000..44af0dfa9 --- /dev/null +++ 
b/backend/pkg/metrics/assets/metrics.go @@ -0,0 +1,72 @@ +package assets + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" + "strconv" +) + +var assetsProcessedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "assets", + Name: "processed_total", + Help: "A counter displaying the total count of processed assets.", + }, +) + +func IncreaseProcessesSessions() { + assetsProcessedSessions.Inc() +} + +var assetsSavedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "assets", + Name: "saved_total", + Help: "A counter displaying the total number of cached assets.", + }, +) + +func IncreaseSavedSessions() { + assetsSavedSessions.Inc() +} + +var assetsDownloadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "assets", + Name: "download_duration_seconds", + Help: "A histogram displaying the duration of downloading for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"response_code"}, +) + +func RecordDownloadDuration(durMillis float64, code int) { + assetsDownloadDuration.WithLabelValues(strconv.Itoa(code)).Observe(durMillis / 1000.0) +} + +var assetsUploadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "assets", + Name: "upload_s3_duration_seconds", + Help: "A histogram displaying the duration of uploading to s3 for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"failed"}, +) + +func RecordUploadDuration(durMillis float64, isFailed bool) { + failed := "false" + if isFailed { + failed = "true" + } + assetsUploadDuration.WithLabelValues(failed).Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + assetsProcessedSessions, + assetsSavedSessions, + assetsDownloadDuration, + assetsUploadDuration, + } +} diff --git a/backend/pkg/metrics/common/metrics.go b/backend/pkg/metrics/common/metrics.go new file mode 100644 index 000000000..85b66c713 --- /dev/null +++ b/backend/pkg/metrics/common/metrics.go @@ -0,0 +1,11 @@ +package common + +// DefaultDurationBuckets is a set of buckets from 5 milliseconds to 1000 seconds (16.6667 minutes) +var DefaultDurationBuckets = []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 25, 50, 100, 250, 500, 1000} + +// DefaultSizeBuckets is a set of buckets from 1 byte to 1_000_000_000 bytes (~1 Gb) +var DefaultSizeBuckets = []float64{1, 10, 50, 100, 250, 500, 1000, 2500, 5000, 10000, 25000, 50000, 100_000, 250_000, + 500_000, 1_000_000, 10_000_000, 100_000_000, 1_000_000_000} + +// DefaultBuckets is a set of buckets from 1 to 1_000_000 elements +var DefaultBuckets = []float64{1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10_000, 50_000, 100_000, 1_000_000} diff --git a/backend/pkg/metrics/database/metrics.go b/backend/pkg/metrics/database/metrics.go new file mode 100644 index 000000000..a9f3990cd --- /dev/null +++ b/backend/pkg/metrics/database/metrics.go @@ -0,0 +1,127 @@ +package database + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var dbBatchSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "batch_size_bytes", + Help: "A histogram displaying the batch size in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordBatchSize(size float64) { + dbBatchSize.Observe(size) +} + +var dbBatchElements = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: 
"db", + Name: "batch_size_elements", + Help: "A histogram displaying the number of SQL commands in each batch.", + Buckets: common.DefaultBuckets, + }, +) + +func RecordBatchElements(number float64) { + dbBatchElements.Observe(number) +} + +var dbBatchInsertDuration = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "batch_insert_duration_seconds", + Help: "A histogram displaying the duration of batch inserts in seconds.", + Buckets: common.DefaultDurationBuckets, + }, +) + +func RecordBatchInsertDuration(durMillis float64) { + dbBatchInsertDuration.Observe(durMillis / 1000.0) +} + +var dbBulkSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_size_bytes", + Help: "A histogram displaying the bulk size in bytes.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkSize(size float64, db, table string) { + dbBulkSize.WithLabelValues(db, table).Observe(size) +} + +var dbBulkElements = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_size_elements", + Help: "A histogram displaying the size of data set in each bulk.", + Buckets: common.DefaultBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkElements(size float64, db, table string) { + dbBulkElements.WithLabelValues(db, table).Observe(size) +} + +var dbBulkInsertDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "bulk_insert_duration_seconds", + Help: "A histogram displaying the duration of bulk inserts in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"db", "table"}, +) + +func RecordBulkInsertDuration(durMillis float64, db, table string) { + dbBulkInsertDuration.WithLabelValues(db, table).Observe(durMillis / 1000.0) +} + +var dbRequestDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "db", + Name: "request_duration_seconds", + Help: "A histogram displaying the duration of each sql request in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"method", "table"}, +) + +func RecordRequestDuration(durMillis float64, method, table string) { + dbRequestDuration.WithLabelValues(method, table).Observe(durMillis / 1000.0) +} + +var dbTotalRequests = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Namespace: "db", + Name: "requests_total", + Help: "A counter showing the total number of all SQL requests.", + }, + []string{"method", "table"}, +) + +func IncreaseTotalRequests(method, table string) { + dbTotalRequests.WithLabelValues(method, table).Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + dbBatchSize, + dbBatchElements, + dbBatchInsertDuration, + dbBulkSize, + dbBulkElements, + dbBulkInsertDuration, + dbRequestDuration, + dbTotalRequests, + } +} diff --git a/backend/pkg/metrics/ender/metrics.go b/backend/pkg/metrics/ender/metrics.go new file mode 100644 index 000000000..5e3308554 --- /dev/null +++ b/backend/pkg/metrics/ender/metrics.go @@ -0,0 +1,51 @@ +package ender + +import "github.com/prometheus/client_golang/prometheus" + +var enderActiveSessions = prometheus.NewGauge( + prometheus.GaugeOpts{ + Namespace: "ender", + Name: "sessions_active", + Help: "A gauge displaying the number of active (live) sessions.", + }, +) + +func IncreaseActiveSessions() { + enderActiveSessions.Inc() +} + +func DecreaseActiveSessions() { + enderActiveSessions.Dec() +} + +var enderClosedSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + 
Namespace: "ender", + Name: "sessions_closed", + Help: "A counter displaying the number of closed sessions (sent SessionEnd).", + }, +) + +func IncreaseClosedSessions() { + enderClosedSessions.Inc() +} + +var enderTotalSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "ender", + Name: "sessions_total", + Help: "A counter displaying the number of all processed sessions.", + }, +) + +func IncreaseTotalSessions() { + enderTotalSessions.Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + enderActiveSessions, + enderClosedSessions, + enderTotalSessions, + } +} diff --git a/backend/pkg/metrics/http/metrics.go b/backend/pkg/metrics/http/metrics.go new file mode 100644 index 000000000..7a835d7f6 --- /dev/null +++ b/backend/pkg/metrics/http/metrics.go @@ -0,0 +1,55 @@ +package http + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" + "strconv" +) + +var httpRequestSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "http", + Name: "request_size_bytes", + Help: "A histogram displaying the size of each HTTP request in bytes.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"url", "response_code"}, +) + +func RecordRequestSize(size float64, url string, code int) { + httpRequestSize.WithLabelValues(url, strconv.Itoa(code)).Observe(size) +} + +var httpRequestDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "http", + Name: "request_duration_seconds", + Help: "A histogram displaying the duration of each HTTP request in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"url", "response_code"}, +) + +func RecordRequestDuration(durMillis float64, url string, code int) { + httpRequestDuration.WithLabelValues(url, strconv.Itoa(code)).Observe(durMillis / 1000.0) +} + +var httpTotalRequests = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "http", + Name: "requests_total", + Help: "A counter displaying the number all HTTP requests.", + }, +) + +func IncreaseTotalRequests() { + httpTotalRequests.Inc() +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + httpRequestSize, + httpRequestDuration, + httpTotalRequests, + } +} diff --git a/backend/pkg/metrics/server.go b/backend/pkg/metrics/server.go new file mode 100644 index 000000000..fb3be5afc --- /dev/null +++ b/backend/pkg/metrics/server.go @@ -0,0 +1,40 @@ +package metrics + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/collectors" + "github.com/prometheus/client_golang/prometheus/promhttp" + "log" + "net/http" +) + +type MetricServer struct { + registry *prometheus.Registry +} + +func New() *MetricServer { + registry := prometheus.NewRegistry() + // Add go runtime metrics and process collectors. + registry.MustRegister( + collectors.NewGoCollector(), + collectors.NewProcessCollector(collectors.ProcessCollectorOpts{}), + ) + // Expose /metrics HTTP endpoint using the created custom registry. + http.Handle( + "/metrics", promhttp.HandlerFor( + registry, + promhttp.HandlerOpts{ + EnableOpenMetrics: true, + }), + ) + go func() { + log.Println(http.ListenAndServe(":8888", nil)) + }() + return &MetricServer{ + registry: registry, + } +} + +func (s *MetricServer) Register(cs []prometheus.Collector) { + s.registry.MustRegister(cs...) 
+} diff --git a/backend/pkg/metrics/sink/metrics.go b/backend/pkg/metrics/sink/metrics.go new file mode 100644 index 000000000..52cb73ba1 --- /dev/null +++ b/backend/pkg/metrics/sink/metrics.go @@ -0,0 +1,185 @@ +package sink + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var sinkMessageSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "message_size_bytes", + Help: "A histogram displaying the size of each message in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordMessageSize(size float64) { + sinkMessageSize.Observe(size) +} + +var sinkWrittenMessages = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "messages_written", + Help: "A counter displaying the total number of all written messages.", + }, +) + +func IncreaseWrittenMessages() { + sinkWrittenMessages.Inc() +} + +var sinkTotalMessages = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "messages_total", + Help: "A counter displaying the total number of all processed messages.", + }, +) + +func IncreaseTotalMessages() { + sinkTotalMessages.Inc() +} + +var sinkBatchSize = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "batch_size_bytes", + Help: "A histogram displaying the size of each batch in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordBatchSize(size float64) { + sinkBatchSize.Observe(size) +} + +var sinkTotalBatches = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "batches_total", + Help: "A counter displaying the total number of all written batches.", + }, +) + +func IncreaseTotalBatches() { + sinkTotalBatches.Inc() +} + +var sinkWrittenBytes = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "written_bytes", + Help: "A histogram displaying the size of buffer in bytes written to session file.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"file_type"}, +) + +func RecordWrittenBytes(size float64, fileType string) { + if size == 0 { + return + } + sinkWrittenBytes.WithLabelValues(fileType).Observe(size) + IncreaseTotalWrittenBytes(size, fileType) +} + +var sinkTotalWrittenBytes = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "written_bytes_total", + Help: "A counter displaying the total number of bytes written to all session files.", + }, + []string{"file_type"}, +) + +func IncreaseTotalWrittenBytes(size float64, fileType string) { + if size == 0 { + return + } + sinkTotalWrittenBytes.WithLabelValues(fileType).Add(size) +} + +var sinkCachedAssets = prometheus.NewGauge( + prometheus.GaugeOpts{ + Namespace: "sink", + Name: "assets_cached", + Help: "A gauge displaying the current number of cached assets.", + }, +) + +func IncreaseCachedAssets() { + sinkCachedAssets.Inc() +} + +func DecreaseCachedAssets() { + sinkCachedAssets.Dec() +} + +var sinkSkippedAssets = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "assets_skipped", + Help: "A counter displaying the total number of all skipped assets.", + }, +) + +func IncreaseSkippedAssets() { + sinkSkippedAssets.Inc() +} + +var sinkTotalAssets = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "sink", + Name: "assets_total", + Help: "A counter displaying the total number of all processed assets.", + }, +) + +func IncreaseTotalAssets() { + sinkTotalAssets.Inc() +} + +var sinkAssetSize = 
prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "asset_size_bytes", + Help: "A histogram displaying the size of each asset in bytes.", + Buckets: common.DefaultSizeBuckets, + }, +) + +func RecordAssetSize(size float64) { + sinkAssetSize.Observe(size) +} + +var sinkProcessAssetDuration = prometheus.NewHistogram( + prometheus.HistogramOpts{ + Namespace: "sink", + Name: "asset_process_duration_seconds", + Help: "A histogram displaying the duration of processing for each asset in seconds.", + Buckets: common.DefaultDurationBuckets, + }, +) + +func RecordProcessAssetDuration(durMillis float64) { + sinkProcessAssetDuration.Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + sinkMessageSize, + sinkWrittenMessages, + sinkTotalMessages, + sinkBatchSize, + sinkTotalBatches, + sinkWrittenBytes, + sinkTotalWrittenBytes, + sinkCachedAssets, + sinkSkippedAssets, + sinkTotalAssets, + sinkAssetSize, + sinkProcessAssetDuration, + } +} diff --git a/backend/pkg/metrics/storage/metrics.go b/backend/pkg/metrics/storage/metrics.go new file mode 100644 index 000000000..26459c90d --- /dev/null +++ b/backend/pkg/metrics/storage/metrics.go @@ -0,0 +1,114 @@ +package storage + +import ( + "github.com/prometheus/client_golang/prometheus" + "openreplay/backend/pkg/metrics/common" +) + +var storageSessionSize = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "session_size_bytes", + Help: "A histogram displaying the size of each session file in bytes prior to any manipulation.", + Buckets: common.DefaultSizeBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionSize(fileSize float64, fileType string) { + storageSessionSize.WithLabelValues(fileType).Observe(fileSize) +} + +var storageTotalSessions = prometheus.NewCounter( + prometheus.CounterOpts{ + Namespace: "storage", + Name: "sessions_total", + Help: "A counter displaying the total number of all processed sessions.", + }, +) + +func IncreaseStorageTotalSessions() { + storageTotalSessions.Inc() +} + +var storageSessionReadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "read_duration_seconds", + Help: "A histogram displaying the duration of reading for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionReadDuration(durMillis float64, fileType string) { + storageSessionReadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionSortDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "sort_duration_seconds", + Help: "A histogram displaying the duration of sorting for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionSortDuration(durMillis float64, fileType string) { + storageSessionSortDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionEncodeDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "encode_duration_seconds", + Help: "A histogram displaying the duration of encoding for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionEncodeDuration(durMillis float64, fileType string) { + storageSessionEncodeDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionCompressDuration = 
prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "compress_duration_seconds", + Help: "A histogram displaying the duration of compressing for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionCompressDuration(durMillis float64, fileType string) { + storageSessionCompressDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +var storageSessionUploadDuration = prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Namespace: "storage", + Name: "upload_duration_seconds", + Help: "A histogram displaying the duration of uploading to s3 for each session in seconds.", + Buckets: common.DefaultDurationBuckets, + }, + []string{"file_type"}, +) + +func RecordSessionUploadDuration(durMillis float64, fileType string) { + storageSessionUploadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0) +} + +func List() []prometheus.Collector { + return []prometheus.Collector{ + storageSessionSize, + storageTotalSessions, + storageSessionReadDuration, + storageSessionSortDuration, + storageSessionEncodeDuration, + storageSessionCompressDuration, + storageSessionUploadDuration, + } +} diff --git a/ee/backend/pkg/db/clickhouse/bulk.go b/ee/backend/pkg/db/clickhouse/bulk.go index 706b66f68..6eb8d98fd 100644 --- a/ee/backend/pkg/db/clickhouse/bulk.go +++ b/ee/backend/pkg/db/clickhouse/bulk.go @@ -5,6 +5,8 @@ import ( "errors" "fmt" "log" + "openreplay/backend/pkg/metrics/database" + "time" "github.com/ClickHouse/clickhouse-go/v2/lib/driver" ) @@ -16,19 +18,23 @@ type Bulk interface { type bulkImpl struct { conn driver.Conn + table string query string values [][]interface{} } -func NewBulk(conn driver.Conn, query string) (Bulk, error) { +func NewBulk(conn driver.Conn, table, query string) (Bulk, error) { switch { case conn == nil: return nil, errors.New("clickhouse connection is empty") + case table == "": + return nil, errors.New("table is empty") case query == "": return nil, errors.New("query is empty") } return &bulkImpl{ conn: conn, + table: table, query: query, values: make([][]interface{}, 0), }, nil @@ -40,6 +46,7 @@ func (b *bulkImpl) Append(args ...interface{}) error { } func (b *bulkImpl) Send() error { + start := time.Now() batch, err := b.conn.PrepareBatch(context.Background(), b.query) if err != nil { return fmt.Errorf("can't create new batch: %s", err) @@ -50,6 +57,11 @@ func (b *bulkImpl) Send() error { log.Printf("failed query: %s", b.query) } } + err = batch.Send() + // Save bulk metrics + database.RecordBulkElements(float64(len(b.values)), "ch", b.table) + database.RecordBulkInsertDuration(float64(time.Now().Sub(start).Milliseconds()), "ch", b.table) + // Prepare values slice for a new data b.values = make([][]interface{}, 0) - return batch.Send() + return err } diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go index 157d384b9..b872adcc2 100644 --- a/ee/backend/pkg/db/clickhouse/connector.go +++ b/ee/backend/pkg/db/clickhouse/connector.go @@ -3,18 +3,16 @@ package clickhouse import ( "errors" "fmt" + "github.com/ClickHouse/clickhouse-go/v2" + "github.com/ClickHouse/clickhouse-go/v2/lib/driver" "log" "openreplay/backend/pkg/db/types" "openreplay/backend/pkg/hashid" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/url" - "os" "strings" "time" - "github.com/ClickHouse/clickhouse-go/v2" - "github.com/ClickHouse/clickhouse-go/v2/lib/driver" - "openreplay/backend/pkg/license" ) @@ -52,28 +50,14 @@ type 
connectorImpl struct { finished chan struct{} } -// Check env variables. If not present, return default value. -func getEnv(key, fallback string) string { - if value, ok := os.LookupEnv(key); ok { - return value - } - return fallback -} - func NewConnector(url string) Connector { license.CheckLicense() - // Check username, password, database - userName := getEnv("CH_USERNAME", "default") - password := getEnv("CH_PASSWORD", "") - database := getEnv("CH_DATABASE", "default") url = strings.TrimPrefix(url, "tcp://") - url = strings.TrimSuffix(url, "/"+database) + url = strings.TrimSuffix(url, "/default") conn, err := clickhouse.Open(&clickhouse.Options{ Addr: []string{url}, Auth: clickhouse.Auth{ - Database: database, - Username: userName, - Password: password, + Database: "default", }, MaxOpenConns: 20, MaxIdleConns: 15, @@ -99,7 +83,7 @@ func NewConnector(url string) Connector { } func (c *connectorImpl) newBatch(name, query string) error { - batch, err := NewBulk(c.conn, query) + batch, err := NewBulk(c.conn, name, query) if err != nil { return fmt.Errorf("can't create new batch: %s", err) } From 5e846245e6262b01d1b4cdb71ac4f08898783856 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:45:00 +0100 Subject: [PATCH 051/218] fix(backend): removed wrong line from batchSet --- backend/pkg/db/postgres/batches.go | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index abdee36f2..8b9f2484d 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -186,7 +186,6 @@ func (conn *BatchSet) sendBatches(t *batchesTask) { } br.Close() // returns err database.RecordBatchInsertDuration(float64(time.Now().Sub(start).Milliseconds())) - database.IncreaseTotalBatches() } } From 596ddb284da241d326fa5e7176f0b27ea5665573 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:56:00 +0100 Subject: [PATCH 052/218] fix(backend): upgrade /x/net library to avoid vulnerabilities --- backend/go.mod | 8 ++++---- backend/go.sum | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 0615fb0cb..e11b839fa 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -20,14 +20,14 @@ require ( github.com/klauspost/pgzip v1.2.5 github.com/oschwald/maxminddb-golang v1.7.0 github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.12.1 github.com/sethvargo/go-envconfig v0.7.0 github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe - go.opentelemetry.io/otel v1.7.0 go.opentelemetry.io/otel/exporters/prometheus v0.30.0 go.opentelemetry.io/otel/metric v0.30.0 go.opentelemetry.io/otel/sdk/metric v0.30.0 - golang.org/x/net v0.0.0-20220906165146-f3363e06e74c + golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 google.golang.org/api v0.81.0 ) @@ -55,19 +55,19 @@ require ( github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/paulmach/orb v0.7.1 // indirect github.com/pierrec/lz4/v4 v4.1.15 // indirect - github.com/prometheus/client_golang v1.12.1 // indirect github.com/prometheus/client_model v0.2.0 // indirect github.com/prometheus/common v0.32.1 // indirect github.com/prometheus/procfs v0.7.3 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/stretchr/testify v1.8.0 // indirect go.opencensus.io v0.23.0 // indirect + go.opentelemetry.io/otel v1.7.0 // indirect go.opentelemetry.io/otel/sdk v1.7.0 // 
indirect go.opentelemetry.io/otel/trace v1.7.0 // indirect golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect - golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect + golang.org/x/sys v0.1.0 // indirect golang.org/x/text v0.4.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect diff --git a/backend/go.sum b/backend/go.sum index 5aa3ae3de..7b33d881d 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -601,8 +601,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220906165146-f3363e06e74c h1:yKufUcDwucU5urd+50/Opbt4AYpqthk7wHpHok8f1lo= -golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 h1:KrLJ+iz8J6j6VVr/OCfULAcK+xozUmWE43fKpMR4MlI= +golang.org/x/net v0.1.1-0.20221104162952-702349b0e862/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -715,8 +715,8 @@ golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= From dc9f3f79eeba1d125511aa0606beda8ec90ae50a Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 20 Feb 2023 16:59:57 +0100 Subject: [PATCH 053/218] feat(backend): clean up go modules --- backend/go.mod | 6 -- backend/go.sum | 13 --- backend/pkg/monitoring/metrics.go | 138 ------------------------------ 3 files changed, 157 deletions(-) delete mode 100644 backend/pkg/monitoring/metrics.go diff --git a/backend/go.mod b/backend/go.mod index e11b839fa..161513ed8 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -24,9 +24,6 @@ require ( github.com/sethvargo/go-envconfig v0.7.0 github.com/tomasen/realip 
v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe - go.opentelemetry.io/otel/exporters/prometheus v0.30.0 - go.opentelemetry.io/otel/metric v0.30.0 - go.opentelemetry.io/otel/sdk/metric v0.30.0 golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 google.golang.org/api v0.81.0 ) @@ -38,8 +35,6 @@ require ( cloud.google.com/go/storage v1.14.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/go-logr/logr v1.2.3 // indirect - github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/google/go-cmp v0.5.8 // indirect @@ -62,7 +57,6 @@ require ( github.com/stretchr/testify v1.8.0 // indirect go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/otel v1.7.0 // indirect - go.opentelemetry.io/otel/sdk v1.7.0 // indirect go.opentelemetry.io/otel/trace v1.7.0 // indirect golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect diff --git a/backend/go.sum b/backend/go.sum index 7b33d881d..de6d507d3 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -80,8 +80,6 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo= github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= -github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= @@ -156,9 +154,7 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= @@ -489,14 +485,6 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM= go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= -go.opentelemetry.io/otel/exporters/prometheus v0.30.0 h1:YXo5ZY5nofaEYMCMTTMaRH2cLDZB8+0UGuk5RwMfIo0= -go.opentelemetry.io/otel/exporters/prometheus v0.30.0/go.mod h1:qN5feW+0/d661KDtJuATEmHtw5bKBK7NSvNEP927zSs= -go.opentelemetry.io/otel/metric v0.30.0 
h1:Hs8eQZ8aQgs0U49diZoaS6Uaxw3+bBE3lcMUKBFIk3c= -go.opentelemetry.io/otel/metric v0.30.0/go.mod h1:/ShZ7+TS4dHzDFmfi1kSXMhMVubNoP0oIaBp70J6UXU= -go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0= -go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= -go.opentelemetry.io/otel/sdk/metric v0.30.0 h1:XTqQ4y3erR2Oj8xSAOL5ovO5011ch2ELg51z4fVkpME= -go.opentelemetry.io/otel/sdk/metric v0.30.0/go.mod h1:8AKFRi5HyvTR0RRty3paN1aMC9HMT+NzcEhw/BLkLX8= go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o= go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -690,7 +678,6 @@ golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go deleted file mode 100644 index 803fba127..000000000 --- a/backend/pkg/monitoring/metrics.go +++ /dev/null @@ -1,138 +0,0 @@ -package monitoring - -import ( - "fmt" - "log" - "net/http" - - "go.opentelemetry.io/otel/exporters/prometheus" - "go.opentelemetry.io/otel/metric" - "go.opentelemetry.io/otel/metric/global" - "go.opentelemetry.io/otel/metric/instrument/syncfloat64" - "go.opentelemetry.io/otel/sdk/metric/aggregator/histogram" - controller "go.opentelemetry.io/otel/sdk/metric/controller/basic" - "go.opentelemetry.io/otel/sdk/metric/export/aggregation" - processor "go.opentelemetry.io/otel/sdk/metric/processor/basic" - selector "go.opentelemetry.io/otel/sdk/metric/selector/simple" -) - -// Metrics stores all collected metrics -type Metrics struct { - meter metric.Meter - counters map[string]syncfloat64.Counter - upDownCounters map[string]syncfloat64.UpDownCounter - histograms map[string]syncfloat64.Histogram -} - -func New(name string) *Metrics { - m := &Metrics{ - counters: make(map[string]syncfloat64.Counter), - upDownCounters: make(map[string]syncfloat64.UpDownCounter), - histograms: make(map[string]syncfloat64.Histogram), - } - m.initPrometheusDataExporter() - m.initMetrics(name) - return m -} - -// initPrometheusDataExporter allows to use collected metrics in prometheus -func (m *Metrics) initPrometheusDataExporter() { - config := prometheus.Config{ - DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50, 100, 250, 500, 1000}, - } - c := controller.New( - processor.NewFactory( - selector.NewWithHistogramDistribution( - histogram.WithExplicitBoundaries(config.DefaultHistogramBoundaries), - ), - aggregation.CumulativeTemporalitySelector(), - processor.WithMemory(true), - ), - ) - exporter, err := prometheus.New(config, c) - if err != nil { - log.Panicf("failed to initialize prometheus 
exporter %v", err) - } - - global.SetMeterProvider(exporter.MeterProvider()) - - http.HandleFunc("/metrics", exporter.ServeHTTP) - go func() { - _ = http.ListenAndServe(":8888", nil) - }() - - fmt.Println("Prometheus server running on :8888") -} - -func (m *Metrics) initMetrics(name string) { - m.meter = global.Meter(name) -} - -/* -Counter is a synchronous instrument that measures additive non-decreasing values, for example, the number of: -- processed requests -- received bytes -- disk reads -*/ - -func (m *Metrics) RegisterCounter(name string) (syncfloat64.Counter, error) { - if counter, ok := m.counters[name]; ok { - return counter, nil - } - counter, err := m.meter.SyncFloat64().Counter(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize counter: %v", err) - } - m.counters[name] = counter - return counter, nil -} - -func (m *Metrics) GetCounter(name string) syncfloat64.Counter { - return m.counters[name] -} - -/* -UpDownCounter is a synchronous instrument which measures additive values that increase or decrease with time, -for example, the number of: -- active requests -- open connections -- memory in use (megabytes) -*/ - -func (m *Metrics) RegisterUpDownCounter(name string) (syncfloat64.UpDownCounter, error) { - if counter, ok := m.upDownCounters[name]; ok { - return counter, nil - } - counter, err := m.meter.SyncFloat64().UpDownCounter(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize upDownCounter: %v", err) - } - m.upDownCounters[name] = counter - return counter, nil -} - -func (m *Metrics) GetUpDownCounter(name string) syncfloat64.UpDownCounter { - return m.upDownCounters[name] -} - -/* -Histogram is a synchronous instrument that produces a histogram from recorded values, for example: -- request latency -- request size -*/ - -func (m *Metrics) RegisterHistogram(name string) (syncfloat64.Histogram, error) { - if hist, ok := m.histograms[name]; ok { - return hist, nil - } - hist, err := m.meter.SyncFloat64().Histogram(name) - if err != nil { - return nil, fmt.Errorf("failed to initialize histogram: %v", err) - } - m.histograms[name] = hist - return hist, nil -} - -func (m *Metrics) GetHistogram(name string) syncfloat64.Histogram { - return m.histograms[name] -} From b93c2ed269172fb72adc886e973037ec3d82de12 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 20 Feb 2023 17:24:09 +0100 Subject: [PATCH 054/218] fix(ui): change clickmap fetch filter --- frontend/app/services/MetricService.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/services/MetricService.ts b/frontend/app/services/MetricService.ts index d8c6e099a..5b97ec4ec 100644 --- a/frontend/app/services/MetricService.ts +++ b/frontend/app/services/MetricService.ts @@ -75,7 +75,7 @@ export default class MetricService { getMetricChartData(metric: Widget, data: any, isWidget: boolean = false): Promise { if ( metric.metricType === CLICKMAP - && document.location.pathname.split('/').pop() !== 'metrics' + && document.location.pathname.split('/').pop() === 'metrics' && (document.location.pathname.indexOf('dashboard') !== -1 && document.location.pathname.indexOf('metric') === -1) ) { return Promise.resolve({}) From 3f6156dbf7e6586a16884a532d2ab1f51fa0659f Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:02:14 +0100 Subject: [PATCH 055/218] fix(ui) - search url unhandled filter key --- .../Filters/FilterSource/FilterSource.tsx | 2 +- frontend/app/types/filter/newFilter.js | 1 + frontend/app/utils/search.ts | 17 ++++++++++++----- 3 files 
changed, 14 insertions(+), 6 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index 08c93d8df..7ae8d3a92 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -9,7 +9,7 @@ interface Props { } function FilterSource(props: Props) { const { filter } = props; - const [value, setValue] = useState(filter.source[0] || ''); + const [value, setValue] = useState(filter.source && filter.source[0] ? filter.source[0] : ''); useEffect(() => { setValue(filter.source[0] || ''); diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index 7a612c193..286f1cc13 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -195,6 +195,7 @@ export default Record({ _filter = filtersMap[type]; } } + return { ..._filter, ...filter, diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 017a5a7f6..d688ee369 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -58,9 +58,6 @@ const getFiltersFromEntries = (entires: any) => { let filter: any = {}; const filterKey = getFilterKeyTypeByKey(item.key); - if (!filterKey) { - return; - } const tmp = item.value.split('^'); const valueArr = tmp[0].split('|'); const operator = valueArr.shift(); @@ -78,10 +75,20 @@ const getFiltersFromEntries = (entires: any) => { } } + if (!filter) { + return + } + filter.value = valueArr; filter.operator = operator; - filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; - filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; + if (filter.icon === "filters/metadata") { + filter.source = filter.type; + filter.type = 'metadata'; + } else { + filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; + filter.sourceOperator = !!sourceOperator ? decodeURI(sourceOperator) : null; + } + if (!filter.filters || filter.filters.size === 0) { filters.push(filter); } From 12c81188edbc1e768019dc5efdf6a2e3c0267047 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 20 Feb 2023 18:26:40 +0100 Subject: [PATCH 056/218] fix(ui) - search url unhandled filter key --- .../app/components/shared/Filters/FilterSource/FilterSource.tsx | 2 +- frontend/app/utils/search.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx index 7ae8d3a92..07ca61ec3 100644 --- a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx +++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx @@ -12,7 +12,7 @@ function FilterSource(props: Props) { const [value, setValue] = useState(filter.source && filter.source[0] ? filter.source[0] : ''); useEffect(() => { - setValue(filter.source[0] || ''); + setValue(filter.source && filter.source[0] ? 
filter.source[0] : '');
   }, [filter]);
 
   const write = ({ target: { value, name } }: any) => setValue(value);
diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts
index d688ee369..4b32f8d13 100644
--- a/frontend/app/utils/search.ts
+++ b/frontend/app/utils/search.ts
@@ -13,7 +13,7 @@ export const createUrlQuery = (filter: any) => {
     let str = `${f.operator}|${f.value.join('|')}`;
 
     if (f.hasSource) {
-      str = `${str}^${f.sourceOperator}|${f.source.join('|')}`;
+      str = `${str}^${f.sourceOperator ? f.sourceOperator : ''}|${f.source ? f.source.join('|') : ''}`;
     }
 
     let key: any = setQueryParamKeyFromFilterkey(f.key);

From 47f371bda8a946c06f23c168cec5be7539cb5c92 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Mon, 20 Feb 2023 18:27:45 +0100
Subject: [PATCH 057/218] fix(ui) - modal scroll issue

---
 frontend/app/components/ui/Modal/Modal.tsx | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/frontend/app/components/ui/Modal/Modal.tsx b/frontend/app/components/ui/Modal/Modal.tsx
index 89ba9f5d9..c489aa216 100644
--- a/frontend/app/components/ui/Modal/Modal.tsx
+++ b/frontend/app/components/ui/Modal/Modal.tsx
@@ -13,7 +13,8 @@ function Modal(props: Props) {
   useEffect(() => {
     if (open) {
       document.body.style.overflow = 'hidden';
-    } else {
+    }
+    return () => {
       document.body.style.overflow = 'auto';
     }
   }, [open]);

From b563839f291b86d98ef20b05aa4773fbe9bd7d80 Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Mon, 20 Feb 2023 18:37:39 +0100
Subject: [PATCH 058/218] fix(ui) - card sessions pagination reset

---
 .../Dashboard/components/WidgetSessions/WidgetSessions.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx
index 4052e7a7e..f563d688e 100644
--- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx
+++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx
@@ -94,7 +94,7 @@ function WidgetSessions(props: Props) {
   useEffect(() => {
     metricStore.updateKey('sessionsPage', 1);
     loadData();
-  }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch]);
+  }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch, activeSeries]);
   useEffect(loadData, [metricStore.sessionsPage]);
 
   return (

From 2a2abc6952c5ebf5e5b323980a70a594f8b9e7bc Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Mon, 20 Feb 2023 18:46:52 +0100
Subject: [PATCH 059/218] fix(ui) - filters z-index that was causing depth issue

---
 .../Filters/FilterValueDropdown/FilterValueDropdown.module.css | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css
index b0ca01016..6e34010b3 100644
--- a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css
+++ b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.module.css
@@ -6,7 +6,7 @@
     align-items: center;
     height: 26px;
     width: 100%;
-    z-index: 3;
+    /* z-index: 3; TODO this has to be fixed in clickmaps @Nikita */
 
     & .right {
         height: 24px;

From cb517e447e91af7114737fb48910f6bfe63e6d55 Mon Sep 17 00:00:00 2001
From: nick-delirium
Date: Tue, 21 Feb 2023 10:33:11 +0100
Subject: [PATCH 060/218] fix(player): don't load
devtools for clickmaps, fix scrolling overflow --- frontend/app/player/web/MessageManager.ts | 5 +++-- frontend/app/player/web/Screen/Screen.ts | 7 +++---- frontend/app/player/web/WebPlayer.ts | 2 +- frontend/app/player/web/addons/TargetMarker.ts | 2 -- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index d0ae18020..68ef0cbf8 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -193,9 +193,9 @@ export default class MessageManager { // this.state.update({ filesLoaded: true }) } - async loadMessages() { + async loadMessages(isClickmap: boolean = false) { this.setMessagesLoading(true) - // TODO: reuseable decryptor instance + // TODO: reusable decryptor instance const createNewParser = (shouldDecrypt = true) => { const decrypt = shouldDecrypt && this.session.fileKey ? (b: Uint8Array) => decryptSessionBytes(b, this.session.fileKey) @@ -233,6 +233,7 @@ export default class MessageManager { .finally(this.onFileReadFinally); // load devtools (TODO: start after the first DOM file download) + if (isClickmap) return; this.state.update({ devtoolsLoading: true }) loadFiles(this.session.devtoolsURL, createNewParser()) // EFS fallback diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts index b095385b1..cca56d402 100644 --- a/frontend/app/player/web/Screen/Screen.ts +++ b/frontend/app/player/web/Screen/Screen.ts @@ -213,11 +213,12 @@ export default class Screen { case ScaleMode.Embed: this.scaleRatio = Math.min(offsetWidth / width, offsetHeight / height) translate = "translate(-50%, -50%)" + posStyles = { height: height + 'px' } break; case ScaleMode.AdjustParentHeight: this.scaleRatio = offsetWidth / width translate = "translate(-50%, 0)" - posStyles = { top: 0 } + posStyles = { top: 0, height: this.document!.documentElement.getBoundingClientRect().height + 'px', } break; } @@ -232,13 +233,11 @@ export default class Screen { } Object.assign(this.screen.style, posStyles, { - height: height + 'px', width: width + 'px', transform: `scale(${this.scaleRatio}) ${translate}`, }) - Object.assign(this.iframe.style, { + Object.assign(this.iframe.style, posStyles, { width: width + 'px', - height: height + 'px', }) this.boundingRect = this.overlay.getBoundingClientRect(); diff --git a/frontend/app/player/web/WebPlayer.ts b/frontend/app/player/web/WebPlayer.ts index c4da835ff..d94d10beb 100644 --- a/frontend/app/player/web/WebPlayer.ts +++ b/frontend/app/player/web/WebPlayer.ts @@ -46,7 +46,7 @@ export default class WebPlayer extends Player { this.screen = screen this.messageManager = messageManager if (!live) { // hack. 
TODO: split OfflinePlayer class - messageManager.loadMessages() + void messageManager.loadMessages(isClickMap) } this.targetMarker = new TargetMarker(this.screen, wpState) diff --git a/frontend/app/player/web/addons/TargetMarker.ts b/frontend/app/player/web/addons/TargetMarker.ts index c9315f01b..6629ceaec 100644 --- a/frontend/app/player/web/addons/TargetMarker.ts +++ b/frontend/app/player/web/addons/TargetMarker.ts @@ -161,7 +161,6 @@ export default class TargetMarker { const scaleRatio = this.screen.getScale() Object.assign(overlay.style, clickmapStyles.overlayStyle({ height: iframeSize.height, width: iframeSize.width, scale: scaleRatio })) - console.log(selections) this.clickMapOverlay = overlay selections.forEach((s, i) => { const el = this.screen.getElementBySelector(s.selector); @@ -189,7 +188,6 @@ export default class TargetMarker { const border = document.createElement("div") - let key = 0 if (width > 50) { From e8d41bbcd3630b00b0d1c6c4247c146771f0d2ad Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 10:30:54 +0000 Subject: [PATCH 061/218] Updating parallel script --- scripts/helmcharts/build_deploy_parallel.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/build_deploy_parallel.sh b/scripts/helmcharts/build_deploy_parallel.sh index 38c1633bb..268811a34 100644 --- a/scripts/helmcharts/build_deploy_parallel.sh +++ b/scripts/helmcharts/build_deploy_parallel.sh @@ -8,6 +8,12 @@ set -e # Removing local alpine:latest image docker rmi alpine || true +# Signing image +# cosign sign --key awskms:///alias/openreplay-container-sign image_url:tag +export SIGN_IMAGE=1 +export PUSH_IMAGE=1 +export AWS_DEFAULT_REGION="eu-central-1" +export SIGN_KEY="awskms:///alias/openreplay-container-sign" echo $DOCKER_REPO [[ -z DOCKER_REPO ]] && { echo Set DOCKER_REPO="your docker registry" @@ -22,9 +28,9 @@ echo $DOCKER_REPO tmux split-window "cd ../../frontend && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" tmux select-layout tiled tmux split-window "cd ../../sourcemap-reader && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_alerts.sh $@" - tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_crons.sh $@" + tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@ + && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_alerts.sh $@ + && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_crons.sh $@" tmux select-layout tiled } From 9c6cb3c23d88f3ba6f736fa47ed9a0c1a49203a9 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 11:32:54 +0100 Subject: [PATCH 062/218] chore(build): ignoring ee folder for sourcemap-reader build Signed-off-by: rjshrjndrn --- sourcemap-reader/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 859347fd4..fbe8762e2 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -34,7 +34,7 @@ function build_api(){ tag="" # Copy enterprise code [[ $1 == "ee" ]] && { - cp -rf ../ee/sourcemap-reader/* ./ + cp -rf ../ee/sourcemap-reader/* ./ || true # We share same codebase for ee/foss 
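# [Editor's note — not part of the patch: the `|| true` added above makes a
# failed copy non-fatal, so a checkout without ee sourcemap-reader files
# (the FOSS case, presumably) no longer aborts the build. Assuming build.sh
# runs with `set -e`, as build_deploy_parallel.sh in the previous patch does,
# the pattern works like this:
#
#   set -e
#   cp -rf ../ee/sourcemap-reader/* ./ || true  # cp may fail; exit status is forced to 0
#   echo "build continues"
# ]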
envarg="default-ee" tag="ee-" } From bd10225364def4ab089c89b95d224ad6e28fd7a9 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 21 Feb 2023 11:39:23 +0100 Subject: [PATCH 063/218] fix(ui) - widget sessions pagination --- .../WidgetSessions/WidgetSessions.tsx | 323 ++++++++++-------- 1 file changed, 173 insertions(+), 150 deletions(-) diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index f563d688e..9535ab976 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -10,168 +10,191 @@ import { debounce } from 'App/utils'; import useIsMounted from 'App/hooks/useIsMounted'; import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; import { numberWithCommas } from 'App/utils'; -import { CLICKMAP } from "App/constants/card"; +import { CLICKMAP } from 'App/constants/card'; interface Props { - className?: string; + className?: string; } function WidgetSessions(props: Props) { - const { className = '' } = props; - const [activeSeries, setActiveSeries] = useState('all'); - const [data, setData] = useState([]); - const isMounted = useIsMounted(); - const [loading, setLoading] = useState(false); - const filteredSessions = getListSessionsBySeries(data, activeSeries); - const { dashboardStore, metricStore, sessionStore } = useStore(); - const filter = dashboardStore.drillDownFilter; - const widget = metricStore.instance; - const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); - const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); - const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); + const { className = '' } = props; + const [activeSeries, setActiveSeries] = useState('all'); + const [data, setData] = useState([]); + const isMounted = useIsMounted(); + const [loading, setLoading] = useState(false); + const filteredSessions = getListSessionsBySeries(data, activeSeries); + const { dashboardStore, metricStore, sessionStore } = useStore(); + const filter = dashboardStore.drillDownFilter; + const widget = metricStore.instance; + const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); + const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); + const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); - const writeOption = ({ value }: any) => setActiveSeries(value.value); - useEffect(() => { - if (!data) return; - const seriesOptions = data.map((item: any) => ({ - label: item.seriesName, - value: item.seriesId, - })); - setSeriesOptions([{ label: 'All', value: 'all' }, ...seriesOptions]); - }, [data]); + const writeOption = ({ value }: any) => setActiveSeries(value.value); + useEffect(() => { + if (!data) return; + const seriesOptions = data.map((item: any) => ({ + label: item.seriesName, + value: item.seriesId, + })); + setSeriesOptions([{ label: 'All', value: 'all' }, ...seriesOptions]); + }, [data]); - const fetchSessions = (metricId: any, filter: any) => { - if (!isMounted()) return; - setLoading(true); - delete filter.eventsOrderSupport; - widget - .fetchSessions(metricId, filter) - .then((res: any) => { - setData(res); - }) - .finally(() => { - setLoading(false); - }); - }; - const fetchClickmapSessions = (customFilters: Record) => { - 
sessionStore.getSessions(customFilters) - .then(data => { - setData([{ ...data, seriesId: 1 , seriesName: "Clicks" }]) - }) + const fetchSessions = (metricId: any, filter: any) => { + if (!isMounted()) return; + setLoading(true); + delete filter.eventsOrderSupport; + widget + .fetchSessions(metricId, filter) + .then((res: any) => { + setData(res); + }) + .finally(() => { + setLoading(false); + }); + }; + const fetchClickmapSessions = (customFilters: Record) => { + sessionStore.getSessions(customFilters).then((data) => { + setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]); + }); + }; + const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []); + const debounceClickMapSearch = React.useCallback(debounce(fetchClickmapSessions, 1000), []); + + const depsString = JSON.stringify(widget.series); + + const loadData = () => { + if (widget.metricType === CLICKMAP && metricStore.clickMapSearch) { + const clickFilter = { + value: [metricStore.clickMapSearch], + type: 'CLICK', + operator: 'onSelector', + isEvent: true, + // @ts-ignore + filters: [], + }; + const timeRange = { + rangeValue: dashboardStore.drillDownPeriod.rangeValue, + startDate: dashboardStore.drillDownPeriod.start, + endDate: dashboardStore.drillDownPeriod.end, + }; + const customFilter = { + ...filter, + ...timeRange, + filters: [...sessionStore.userFilter.filters, clickFilter], + }; + debounceClickMapSearch(customFilter); + } else { + debounceRequest(widget.metricId, { + ...filter, + series: widget.series.map((s) => s.toJson()), + page: metricStore.sessionsPage, + limit: metricStore.sessionsPageSize, + }); } - const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []); - const debounceClickMapSearch = React.useCallback(debounce(fetchClickmapSessions, 1000), []) + }; + useEffect(() => { + metricStore.updateKey('sessionsPage', 1); + loadData(); + }, [ + filter.startTimestamp, + filter.endTimestamp, + filter.filters, + depsString, + metricStore.clickMapSearch, + activeSeries, + ]); + useEffect(loadData, [metricStore.sessionsPage]); - const depsString = JSON.stringify(widget.series); - - const loadData = () => { - if (widget.metricType === CLICKMAP && metricStore.clickMapSearch) { - const clickFilter = { - value: [ - metricStore.clickMapSearch - ], - type: "CLICK", - operator: "onSelector", - isEvent: true, - // @ts-ignore - "filters": [] - } - const timeRange = { - rangeValue: dashboardStore.drillDownPeriod.rangeValue, - startDate: dashboardStore.drillDownPeriod.start, - endDate: dashboardStore.drillDownPeriod.end, - } - const customFilter = { ...filter, ...timeRange, filters: [ ...sessionStore.userFilter.filters, clickFilter]} - debounceClickMapSearch(customFilter) - } else { - debounceRequest(widget.metricId, { - ...filter, - series: widget.series.map(s => s.toJson()), - page: metricStore.sessionsPage, - limit: metricStore.sessionsPageSize, - }); - } - } - useEffect(() => { - metricStore.updateKey('sessionsPage', 1); - loadData(); - }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch, activeSeries]); - useEffect(loadData, [metricStore.sessionsPage]); - - return ( -
-
-
-

        [Editor's note: the JSX in this hunk was corrupted during extraction — the markup
        tags were stripped and only text fragments survive, so the original markup cannot
        be reconstructed from this copy. The recoverable fragments show the render block
        being reformatted and extended: a header reading "Clicks" or "Sessions" depending
        on metricStore.clickMapSearch, followed by the drill-down range ("between
        {startTime} and {endTime}", optionally prefixed with `on "${metricStore.clickMapLabel}"`);
        a "Filter by Series" selector rendered only when the card is neither a table nor a
        click map; a loader with a no-content fallback ("No relevant sessions found for the
        selected time period.") shown when filteredSessions.sessions is empty; the list of
        filteredSessions.sessions; and a pagination footer ("Showing X to Y of
        {numberWithCommas(filteredSessions.total)} sessions") whose Pagination control calls
        metricStore.updateKey('sessionsPage', page) with limit={metricStore.sessionsPageSize}
        and debounceRequest={500}.]
+ ); } const getListSessionsBySeries = (data: any, seriesId: any) => { - const arr: any = { sessions: [], total: 0 }; - data.forEach((element: any) => { - if (seriesId === 'all') { - const sessionIds = arr.sessions.map((i: any) => i.sessionId); - arr.sessions.push(...element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId))); - arr.total = element.total; - } else { - if (element.seriesId === seriesId) { - arr.sessions.push(...element.sessions); - arr.total = element.total; - } - } - }); - return arr; + const arr = data.reduce( + (arr: any, element: any) => { + if (seriesId === 'all') { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId)); + arr.sessions.push(...sessions); + } else if (element.seriesId === seriesId) { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId)); + const duplicates = element.sessions.length - sessions.length; + arr.sessions.push(...sessions); + arr.total = element.total - duplicates; + } + return arr; + }, + { sessions: [] } + ); + arr.total = seriesId === 'all' ? Math.max(...data.map((i: any) => i.total)) : data.find((i: any) => i.seriesId === seriesId).total; + return arr; }; export default observer(WidgetSessions); From 30efc6cfe522d5357a681c2e037a2a814abd539b Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 11:55:45 +0100 Subject: [PATCH 064/218] chore(helm): 2 kafka replicas Signed-off-by: rjshrjndrn --- scripts/helmcharts/databases/values.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/helmcharts/databases/values.yaml b/scripts/helmcharts/databases/values.yaml index 80cdba4e3..1ed77adde 100644 --- a/scripts/helmcharts/databases/values.yaml +++ b/scripts/helmcharts/databases/values.yaml @@ -132,6 +132,7 @@ kafka: tag: 2.8.1 fullnameOverride: kafka enabled: false + replicaCount: 2 # Enterprise dbs From bf4689e171b035c805f4e28cd4d30f0ee0b62cd2 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 12:03:05 +0100 Subject: [PATCH 065/218] fix(ui): fix alert change value --- .../Dashboard/components/Alerts/AlertForm/Condition.tsx | 1 + frontend/app/mstore/alertsStore.ts | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx index 5039cc1dd..ba6956323 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { Input } from 'UI'; import Select from 'Shared/Select'; import { alertConditions as conditions } from 'App/constants'; +import Alert from 'Types/alert' const thresholdOptions = [ { label: '15 minutes', value: 15 }, diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index e608c1873..a2d155ffc 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -20,7 +20,9 @@ export default class AlertsStore { this.page = 1; } + // TODO: remove it updateKey(key: string, value: any) { + // @ts-ignore this[key] = value } @@ -77,10 +79,10 @@ export default class AlertsStore { edit = (diff: Partial) => { const key = Object.keys(diff)[0] - const oldInst = this.instance + const oldInst = { ...this.instance } // @ts-ignore 
oldInst[key] = diff[key] - this.instance = oldInst + this.instance = new Alert(oldInst, !!oldInst.alertId) } } From e5cf98362e4d5fd55e39ae573544f842c635540c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Feb 2023 14:56:59 +0100 Subject: [PATCH 066/218] feat(chalice): fixed update alerts --- api/chalicelib/utils/helper.py | 1 + api/schemas.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 85e34ec80..369aff40a 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -283,6 +283,7 @@ def custom_alert_to_front(values): # to support frontend format for payload if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom: values["query"]["left"] = values["seriesId"] + values["seriesId"] = None return values diff --git a/api/schemas.py b/api/schemas.py index ab057426a..dabeea83d 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -363,9 +363,11 @@ class AlertSchema(BaseModel): @root_validator(pre=True) def transform_alert(cls, values): - if values.get("seriesId") is None and isinstance(values["query"]["left"], int): + if isinstance(values["query"]["left"], int): values["seriesId"] = values["query"]["left"] values["query"]["left"] = AlertColumn.custom + else: + values["seriesId"] = None return values From 2448c474766797c920ac04c0d913fd14a46d374e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 21 Feb 2023 11:39:23 +0100 Subject: [PATCH 067/218] fix(ui) - widget sessions pagination --- .../WidgetSessions/WidgetSessions.tsx | 323 ++++++++++-------- 1 file changed, 173 insertions(+), 150 deletions(-) diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index f563d688e..9535ab976 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -10,168 +10,191 @@ import { debounce } from 'App/utils'; import useIsMounted from 'App/hooks/useIsMounted'; import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; import { numberWithCommas } from 'App/utils'; -import { CLICKMAP } from "App/constants/card"; +import { CLICKMAP } from 'App/constants/card'; interface Props { - className?: string; + className?: string; } function WidgetSessions(props: Props) { - const { className = '' } = props; - const [activeSeries, setActiveSeries] = useState('all'); - const [data, setData] = useState([]); - const isMounted = useIsMounted(); - const [loading, setLoading] = useState(false); - const filteredSessions = getListSessionsBySeries(data, activeSeries); - const { dashboardStore, metricStore, sessionStore } = useStore(); - const filter = dashboardStore.drillDownFilter; - const widget = metricStore.instance; - const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); - const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); - const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); + const { className = '' } = props; + const [activeSeries, setActiveSeries] = useState('all'); + const [data, setData] = useState([]); + const isMounted = useIsMounted(); + const [loading, setLoading] = useState(false); + const filteredSessions = getListSessionsBySeries(data, activeSeries); + const { dashboardStore, metricStore, 
sessionStore } = useStore(); + const filter = dashboardStore.drillDownFilter; + const widget = metricStore.instance; + const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); + const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); + const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); - const writeOption = ({ value }: any) => setActiveSeries(value.value); - useEffect(() => { - if (!data) return; - const seriesOptions = data.map((item: any) => ({ - label: item.seriesName, - value: item.seriesId, - })); - setSeriesOptions([{ label: 'All', value: 'all' }, ...seriesOptions]); - }, [data]); + const writeOption = ({ value }: any) => setActiveSeries(value.value); + useEffect(() => { + if (!data) return; + const seriesOptions = data.map((item: any) => ({ + label: item.seriesName, + value: item.seriesId, + })); + setSeriesOptions([{ label: 'All', value: 'all' }, ...seriesOptions]); + }, [data]); - const fetchSessions = (metricId: any, filter: any) => { - if (!isMounted()) return; - setLoading(true); - delete filter.eventsOrderSupport; - widget - .fetchSessions(metricId, filter) - .then((res: any) => { - setData(res); - }) - .finally(() => { - setLoading(false); - }); - }; - const fetchClickmapSessions = (customFilters: Record) => { - sessionStore.getSessions(customFilters) - .then(data => { - setData([{ ...data, seriesId: 1 , seriesName: "Clicks" }]) - }) + const fetchSessions = (metricId: any, filter: any) => { + if (!isMounted()) return; + setLoading(true); + delete filter.eventsOrderSupport; + widget + .fetchSessions(metricId, filter) + .then((res: any) => { + setData(res); + }) + .finally(() => { + setLoading(false); + }); + }; + const fetchClickmapSessions = (customFilters: Record) => { + sessionStore.getSessions(customFilters).then((data) => { + setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]); + }); + }; + const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []); + const debounceClickMapSearch = React.useCallback(debounce(fetchClickmapSessions, 1000), []); + + const depsString = JSON.stringify(widget.series); + + const loadData = () => { + if (widget.metricType === CLICKMAP && metricStore.clickMapSearch) { + const clickFilter = { + value: [metricStore.clickMapSearch], + type: 'CLICK', + operator: 'onSelector', + isEvent: true, + // @ts-ignore + filters: [], + }; + const timeRange = { + rangeValue: dashboardStore.drillDownPeriod.rangeValue, + startDate: dashboardStore.drillDownPeriod.start, + endDate: dashboardStore.drillDownPeriod.end, + }; + const customFilter = { + ...filter, + ...timeRange, + filters: [...sessionStore.userFilter.filters, clickFilter], + }; + debounceClickMapSearch(customFilter); + } else { + debounceRequest(widget.metricId, { + ...filter, + series: widget.series.map((s) => s.toJson()), + page: metricStore.sessionsPage, + limit: metricStore.sessionsPageSize, + }); } - const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []); - const debounceClickMapSearch = React.useCallback(debounce(fetchClickmapSessions, 1000), []) + }; + useEffect(() => { + metricStore.updateKey('sessionsPage', 1); + loadData(); + }, [ + filter.startTimestamp, + filter.endTimestamp, + filter.filters, + depsString, + metricStore.clickMapSearch, + activeSeries, + ]); + useEffect(loadData, [metricStore.sessionsPage]); - const depsString = JSON.stringify(widget.series); - - const loadData = () => { - if (widget.metricType === CLICKMAP && 
metricStore.clickMapSearch) { - const clickFilter = { - value: [ - metricStore.clickMapSearch - ], - type: "CLICK", - operator: "onSelector", - isEvent: true, - // @ts-ignore - "filters": [] - } - const timeRange = { - rangeValue: dashboardStore.drillDownPeriod.rangeValue, - startDate: dashboardStore.drillDownPeriod.start, - endDate: dashboardStore.drillDownPeriod.end, - } - const customFilter = { ...filter, ...timeRange, filters: [ ...sessionStore.userFilter.filters, clickFilter]} - debounceClickMapSearch(customFilter) - } else { - debounceRequest(widget.metricId, { - ...filter, - series: widget.series.map(s => s.toJson()), - page: metricStore.sessionsPage, - limit: metricStore.sessionsPageSize, - }); - } - } - useEffect(() => { - metricStore.updateKey('sessionsPage', 1); - loadData(); - }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.clickMapSearch, activeSeries]); - useEffect(loadData, [metricStore.sessionsPage]); - - return ( -
-
-
-

{metricStore.clickMapSearch ? 'Clicks' : 'Sessions'}

-
- {metricStore.clickMapLabel ? `on "${metricStore.clickMapLabel}" ` : null} - between {startTime} and{' '} - {endTime}{' '} -
-
- - {widget.metricType !== 'table' && widget.metricType !== CLICKMAP && (
- Filter by Series - +
+ )} +
+ +
+ + + +
+
+ No relevant sessions found for the selected time period.
+
+ } + show={filteredSessions.sessions.length === 0} + > + {filteredSessions.sessions.map((session: any) => ( + + +
+ + ))} + +
+
+ Showing{' '} + + {(metricStore.sessionsPage - 1) * metricStore.sessionsPageSize + 1} + {' '} + to{' '} + + {(metricStore.sessionsPage - 1) * metricStore.sessionsPageSize + + filteredSessions.sessions.length} + {' '} + of {numberWithCommas(filteredSessions.total)}{' '} + sessions. +
+ metricStore.updateKey('sessionsPage', page)}
+ limit={metricStore.sessionsPageSize}
+ debounceRequest={500}
+ />
+
+ + +
+
+ ); } const getListSessionsBySeries = (data: any, seriesId: any) => { - const arr: any = { sessions: [], total: 0 }; - data.forEach((element: any) => { - if (seriesId === 'all') { - const sessionIds = arr.sessions.map((i: any) => i.sessionId); - arr.sessions.push(...element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId))); - arr.total = element.total; - } else { - if (element.seriesId === seriesId) { - arr.sessions.push(...element.sessions); - arr.total = element.total; - } - } - }); - return arr; + const arr = data.reduce( + (arr: any, element: any) => { + if (seriesId === 'all') { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId)); + arr.sessions.push(...sessions); + } else if (element.seriesId === seriesId) { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter((i: any) => !sessionIds.includes(i.sessionId)); + const duplicates = element.sessions.length - sessions.length; + arr.sessions.push(...sessions); + arr.total = element.total - duplicates; + } + return arr; + }, + { sessions: [] } + ); + arr.total = seriesId === 'all' ? Math.max(...data.map((i: any) => i.total)) : data.find((i: any) => i.seriesId === seriesId).total; + return arr; }; export default observer(WidgetSessions); From 75f0e103bc27b37e48ca06fcf52b8fc886ffd6bd Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 21 Feb 2023 11:55:45 +0100 Subject: [PATCH 068/218] chore(helm): 2 kafka replicas Signed-off-by: rjshrjndrn --- scripts/helmcharts/databases/values.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/helmcharts/databases/values.yaml b/scripts/helmcharts/databases/values.yaml index 80cdba4e3..1ed77adde 100644 --- a/scripts/helmcharts/databases/values.yaml +++ b/scripts/helmcharts/databases/values.yaml @@ -132,6 +132,7 @@ kafka: tag: 2.8.1 fullnameOverride: kafka enabled: false + replicaCount: 2 # Enterprise dbs From 9e1f2444802aff0e0d2da687b530769e8affed4c Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 12:03:05 +0100 Subject: [PATCH 069/218] fix(ui): fix alert change value --- .../Dashboard/components/Alerts/AlertForm/Condition.tsx | 1 + frontend/app/mstore/alertsStore.ts | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx index 5039cc1dd..ba6956323 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { Input } from 'UI'; import Select from 'Shared/Select'; import { alertConditions as conditions } from 'App/constants'; +import Alert from 'Types/alert' const thresholdOptions = [ { label: '15 minutes', value: 15 }, diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index e608c1873..a2d155ffc 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -20,7 +20,9 @@ export default class AlertsStore { this.page = 1; } + // TODO: remove it updateKey(key: string, value: any) { + // @ts-ignore this[key] = value } @@ -77,10 +79,10 @@ export default class AlertsStore { edit = (diff: Partial) => { const key = Object.keys(diff)[0] - const oldInst = this.instance + const oldInst = { ...this.instance } // @ts-ignore 
oldInst[key] = diff[key] - this.instance = oldInst + this.instance = new Alert(oldInst, !!oldInst.alertId) } } From 9a1d456add7bd4dbb91ae4e9d2aeb8685f7bbee9 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 21 Feb 2023 15:12:48 +0100 Subject: [PATCH 070/218] change(ui) - widget sessions clear filters --- .../WidgetSessions/WidgetSessions.tsx | 28 +++++++++++++------ 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index 9535ab976..3720dd94b 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useState } from 'react'; -import { NoContent, Loader, Pagination } from 'UI'; +import { NoContent, Loader, Pagination, Button } from 'UI'; import Select from 'Shared/Select'; import cn from 'classnames'; import { useStore } from 'App/mstore'; @@ -28,6 +28,7 @@ function WidgetSessions(props: Props) { const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); + const hasFilters = filter.filters.length > 0 || (filter.startTimestamp !== dashboardStore.drillDownPeriod.start || filter.endTimestamp !== dashboardStore.drillDownPeriod.end); const writeOption = ({ value }: any) => setActiveSeries(value.value); useEffect(() => { @@ -105,6 +106,11 @@ function WidgetSessions(props: Props) { ]); useEffect(loadData, [metricStore.sessionsPage]); + const clearFilters = () => { + metricStore.updateKey('sessionsPage', 1); + dashboardStore.resetDrillDownFilter(); + } + return (
@@ -117,12 +123,15 @@ function WidgetSessions(props: Props) {
- {widget.metricType !== 'table' && widget.metricType !== CLICKMAP && (
- Filter by Series - +
+ )} +
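The @@ -193 hunk that follows adjusts how getListSessionsBySeries reports totals. A minimal sketch of the intended behaviour, assuming the API returns one { seriesId, sessions, total } entry per series; the optional-chaining and 0 fallbacks are additions in this sketch, since the committed code calls .total on find() directly and would throw for an unknown series id:

    // Sketch only, not the committed implementation (TypeScript).
    type SeriesResult = {
      seriesId: string | number;
      sessions: { sessionId: string }[];
      total: number;
    };

    function totalFor(data: SeriesResult[], seriesId: string | number): number {
      // 'all' merges de-duplicated sessions from every series, so the closest
      // available figure is the largest per-series total.
      if (seriesId === 'all') return Math.max(...data.map((i) => i.total));
      // For a single series, fall back to 0 when the id is not found
      // (assumption; the committed code assumes the id is always present).
      return data.find((i) => i.seriesId === seriesId)?.total ?? 0;
    }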
@@ -193,7 +202,10 @@ const getListSessionsBySeries = (data: any, seriesId: any) => { }, { sessions: [] } ); - arr.total = seriesId === 'all' ? Math.max(...data.map((i: any) => i.total)) : data.find((i: any) => i.seriesId === seriesId).total; + arr.total = + seriesId === 'all' + ? Math.max(...data.map((i: any) => i.total)) + : data.find((i: any) => i.seriesId === seriesId).total; return arr; }; From 65f6d403df4300aaa05c81c091d3be958302c730 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Feb 2023 15:15:44 +0100 Subject: [PATCH 071/218] feat(chalice): fixed update alerts --- api/schemas.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/api/schemas.py b/api/schemas.py index dabeea83d..5cae3a31a 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -363,11 +363,10 @@ class AlertSchema(BaseModel): @root_validator(pre=True) def transform_alert(cls, values): + values["seriesId"] = None if isinstance(values["query"]["left"], int): values["seriesId"] = values["query"]["left"] values["query"]["left"] = AlertColumn.custom - else: - values["seriesId"] = None return values From 61eb1c1fd7bf92fe5d9c4f8208e6940d050e7057 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 21 Feb 2023 15:12:48 +0100 Subject: [PATCH 072/218] change(ui) - widget sessions clear filters --- .../WidgetSessions/WidgetSessions.tsx | 28 +++++++++++++------ 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index 9535ab976..3720dd94b 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useState } from 'react'; -import { NoContent, Loader, Pagination } from 'UI'; +import { NoContent, Loader, Pagination, Button } from 'UI'; import Select from 'Shared/Select'; import cn from 'classnames'; import { useStore } from 'App/mstore'; @@ -28,6 +28,7 @@ function WidgetSessions(props: Props) { const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm'); const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm'); const [seriesOptions, setSeriesOptions] = useState([{ label: 'All', value: 'all' }]); + const hasFilters = filter.filters.length > 0 || (filter.startTimestamp !== dashboardStore.drillDownPeriod.start || filter.endTimestamp !== dashboardStore.drillDownPeriod.end); const writeOption = ({ value }: any) => setActiveSeries(value.value); useEffect(() => { @@ -105,6 +106,11 @@ function WidgetSessions(props: Props) { ]); useEffect(loadData, [metricStore.sessionsPage]); + const clearFilters = () => { + metricStore.updateKey('sessionsPage', 1); + dashboardStore.resetDrillDownFilter(); + } + return (
@@ -117,12 +123,15 @@ function WidgetSessions(props: Props) {
- {widget.metricType !== 'table' && widget.metricType !== CLICKMAP && (
- Filter by Series - +
+ )} +
@@ -193,7 +202,10 @@ const getListSessionsBySeries = (data: any, seriesId: any) => { }, { sessions: [] } ); - arr.total = seriesId === 'all' ? Math.max(...data.map((i: any) => i.total)) : data.find((i: any) => i.seriesId === seriesId).total; + arr.total = + seriesId === 'all' + ? Math.max(...data.map((i: any) => i.total)) + : data.find((i: any) => i.seriesId === seriesId).total; return arr; }; From bc227dc450f54ee2359af3ee9ed099e59e38431f Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 15:19:16 +0100 Subject: [PATCH 073/218] fix(ui): fix alert trigger name --- .../components/Alerts/AlertListItem.tsx | 42 ++++++++++++------- .../Dashboard/components/Alerts/NewAlert.tsx | 8 +++- 2 files changed, 34 insertions(+), 16 deletions(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index 024cc734c..071dd204c 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -42,9 +42,8 @@ const getNotifyChannel = (alert: Record, webhooks: Array) => { ' (' + alert.msteamsInput .map((channelId: number) => { - return ( - webhooks.find((hook) => hook.webhookId === channelId && hook.type === 'msteams')?.name - ); + return webhooks.find((hook) => hook.webhookId === channelId && hook.type === 'msteams') + ?.name; }) .join(', ') + ')' @@ -58,7 +57,7 @@ const getNotifyChannel = (alert: Record, webhooks: Array) => { } } if (alert.msteams) { - str += (str === '' ? '' : ' and ') + 'MS Teams' + str += (str === '' ? '' : ' and ') + 'MS Teams'; if (alert.msteamsInput.length > 0) { str += getMsTeamsChannels(); } @@ -79,10 +78,11 @@ interface Props extends RouteComponentProps { init: (alert: Alert) => void; demo?: boolean; webhooks: Array; + triggerOptions: Record; } function AlertListItem(props: Props) { - const { alert, siteId, history, init, demo, webhooks } = props; + const { alert, siteId, history, init, demo, webhooks, triggerOptions } = props; if (!alert) { return null; @@ -95,6 +95,11 @@ function AlertListItem(props: Props) { history.push(path); }; + const formTriggerName = () => + Number.isInteger(alert.query.left) && triggerOptions + ? triggerOptions.find((opt: { value: any, label: string }) => opt.value === alert.query.left).label + : alert.query.left; + return (
{'When the '} - {alert.detectionMethod} + + {alert.detectionMethod} + {' of '} - {alert.seriesName || alert.query.left} + + {triggerOptions ? formTriggerName() : alert.seriesName} + {' is '} {alert.query.operator} - {numberWithCommas(alert.query.right)}{alert.change === 'percent' ? '%' : alert.metric?.unit} + {numberWithCommas(alert.query.right)} + {alert.change === 'percent' ? '%' : alert.metric?.unit} {' over the past '} - {getThreshold( - alert.currentPeriod)} + + {getThreshold(alert.currentPeriod)} + {alert.detectionMethod === 'change' ? ( <> {' compared to the previous '} - {getThreshold( - alert.previousPeriod)} + + {getThreshold(alert.previousPeriod)} + ) : null} {', notify me on '} diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index 717c7ea59..67a6bb459 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -278,7 +278,13 @@ const NewAlert = (props: IProps) => {
{instance && ( - null} webhooks={webhooks} /> + null} + webhooks={webhooks} /> )}
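A note on the trigger-name fix above: when an alert was created from a saved series, query.left carries the numeric series id rather than a metric column name, so the list item resolves the label through triggerOptions. A minimal sketch of that lookup, with an assumed Option shape; the String() and ?? fallbacks are additions here, since the committed formTriggerName reads .label off find() directly and would throw if the id were missing from triggerOptions:

    // Sketch of the lookup behind formTriggerName (Option shape assumed).
    type Option = { value: string | number; label: string };

    function triggerLabel(
      queryLeft: string | number,
      triggerOptions?: Option[]
    ): string {
      if (Number.isInteger(queryLeft) && triggerOptions) {
        // Saved-series alerts store the series id in query.left.
        return (
          triggerOptions.find((opt) => opt.value === queryLeft)?.label ??
          String(queryLeft)
        );
      }
      // Metric-based alerts already carry a readable column name.
      return String(queryLeft);
    }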
From 4bdc095c76a1c421ae9cb04ef186f3ba4d0a7277 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Tue, 21 Feb 2023 15:29:24 +0100 Subject: [PATCH 074/218] fix(backend): removed wrong defer in db.Saver --- ee/backend/internal/db/datasaver/saver.go | 1 - 1 file changed, 1 deletion(-) diff --git a/ee/backend/internal/db/datasaver/saver.go b/ee/backend/internal/db/datasaver/saver.go index 76057309d..e05e502f1 100644 --- a/ee/backend/internal/db/datasaver/saver.go +++ b/ee/backend/internal/db/datasaver/saver.go @@ -19,7 +19,6 @@ func New(pg *cache.PGCache, cfg *db.Config) *Saver { var producer types.Producer = nil if cfg.UseQuickwit { producer = queue.NewProducer(cfg.MessageSizeLimit, true) - defer producer.Close(15000) } return &Saver{pg: pg, producer: producer, topic: cfg.QuickwitTopic} } From 56c14692733aa0b02d638674eaaa6dd7e81a2ce1 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Tue, 21 Feb 2023 15:51:03 +0100 Subject: [PATCH 075/218] fix(backend): removed debug log from session iterator --- backend/pkg/messages/session-iterator.go | 7 ------- 1 file changed, 7 deletions(-) diff --git a/backend/pkg/messages/session-iterator.go b/backend/pkg/messages/session-iterator.go index eb9f32387..45daae4b8 100644 --- a/backend/pkg/messages/session-iterator.go +++ b/backend/pkg/messages/session-iterator.go @@ -40,13 +40,6 @@ func SplitMessages(data []byte) ([]*msgInfo, error) { return nil, fmt.Errorf("read message type err: %s", err) } - if msgType == MsgRedux { - log.Printf("redux") - } - if msgType == MsgFetch { - log.Printf("fetch") - } - // Read message body _, err = ReadMessage(msgType, reader) if err != nil { From f01ff51d3ffea0d6752247285bcd4334d3dca3a4 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 15:46:21 +0100 Subject: [PATCH 076/218] change(tracker): tracker v4.9.10; assist v.4.1.6 --- tracker/tracker-assist/CHANGELOG.md | 5 +++++ tracker/tracker-assist/package.json | 2 +- tracker/tracker/package.json | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/tracker/tracker-assist/CHANGELOG.md b/tracker/tracker-assist/CHANGELOG.md index 5afe0ef96..7ce338e9b 100644 --- a/tracker/tracker-assist/CHANGELOG.md +++ b/tracker/tracker-assist/CHANGELOG.md @@ -1,6 +1,11 @@ +## 4.1.6 + +- fix recording state import + ## 4.1.5 - fixed peerjs hack that caused ts compile issues +- - added screen recording feature (EE) license ## 4.1.4 diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index 7e64ceab8..8b9dcd8ef 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "4.1.5", + "version": "4.1.6", "keywords": [ "WebRTC", "assistance", diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 8e65c3f4c..5682ffc1c 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.9-beta.4", + "version": "4.1.10", "keywords": [ "logging", "replay" From c6bed2fc0a1ab0947cd0d2b65bad97e62b1cd58a Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 21 Feb 2023 16:11:15 +0100 Subject: [PATCH 077/218] fix(player): fix live player on player ready --- .../Session/Player/LivePlayer/LivePlayerInst.tsx | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git 
a/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx b/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx index 2472b6547..c17007648 100644 --- a/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx +++ b/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx @@ -4,7 +4,7 @@ import { findDOMNode } from 'react-dom'; import cn from 'classnames'; import LiveControls from './LiveControls'; import ConsolePanel from 'Shared/DevTools/ConsolePanel'; - +import { observer } from 'mobx-react-lite' import Overlay from './Overlay'; import stl from 'Components/Session_/Player/player.module.css'; import { PlayerContext, ILivePlayerContext } from 'App/components/Session/playerContext'; @@ -27,7 +27,9 @@ function Player(props: IProps) { // @ts-ignore TODO const playerContext = React.useContext(PlayerContext); const screenWrapper = React.useRef(null); + const ready = playerContext.store.get().ready + console.log(ready) React.useEffect(() => { if (!props.closedLive || isMultiview) { const parentElement = findDOMNode(screenWrapper.current) as HTMLDivElement | null; //TODO: good architecture @@ -40,7 +42,7 @@ function Player(props: IProps) { React.useEffect(() => { playerContext.player.scale(); - }, [playerContext.player]); + }, [playerContext.player, ready]); if (!playerContext.player) return null; @@ -78,4 +80,4 @@ export default connect( (isAssist && !state.getIn(['sessions', 'current']).live), }; } -)(Player); +)(observer(Player)); From 601c460dc84c6687ec7c32931d4f6109a51c3bc3 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 21 Feb 2023 16:11:38 +0100 Subject: [PATCH 078/218] change(ui) - enterprise text --- frontend/app/components/Assist/Assist.tsx | 3 ++- .../Dashboard/components/DashboardOptions/DashboardOptions.tsx | 3 ++- .../Dashboard/components/MetricTypeItem/MetricTypeItem.tsx | 3 ++- .../Dashboard/components/MetricTypeList/MetricTypeList.tsx | 3 ++- frontend/app/components/Login/Login.js | 3 ++- .../app/components/Session_/ScreenRecorder/ScreenRecorder.tsx | 3 ++- .../shared/CustomDropdownOption/CustomDropdownOption.tsx | 3 ++- frontend/app/constants/index.js | 3 ++- 8 files changed, 16 insertions(+), 8 deletions(-) diff --git a/frontend/app/components/Assist/Assist.tsx b/frontend/app/components/Assist/Assist.tsx index abb1403a9..b58df1352 100644 --- a/frontend/app/components/Assist/Assist.tsx +++ b/frontend/app/components/Assist/Assist.tsx @@ -6,6 +6,7 @@ import AssistRouter from './AssistRouter'; import { SideMenuitem } from 'UI'; import { withSiteId, assist, recordings } from 'App/routes'; import { connect } from 'react-redux'; +import { ENTERPRISE_REQUEIRED } from 'App/constants'; interface Props extends RouteComponentProps { siteId: string; @@ -40,7 +41,7 @@ function Assist(props: Props) { iconName="record-circle" onClick={() => redirect(recordings())} disabled={!isEnterprise} - tooltipTitle="This feature requires an enterprise license." + tooltipTitle={ENTERPRISE_REQUEIRED} />
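The enterprise-text commit above (continued in the file diffs that follow) replaces a tooltip string that was copy-pasted across components with a shared constant imported from App/constants. Note that the identifier is spelled ENTERPRISE_REQUEIRED throughout the codebase, so every call site keeps that spelling for consistency. A sketch of the constant side of the change; the exported value is an assumption based on the literal the diff removes:

    // frontend/app/constants/index.js (sketch; wording taken from the removed literal)
    export const ENTERPRISE_REQUEIRED = 'This feature requires an enterprise license.';

Call sites such as tooltipTitle={ENTERPRISE_REQUEIRED} then stay in sync if the copy ever changes.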
diff --git a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx index 62cfd9404..b006dbb22 100644 --- a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx +++ b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx @@ -1,6 +1,7 @@ import React from 'react'; import { ItemMenu } from 'UI'; import { connect } from 'react-redux'; +import { ENTERPRISE_REQUEIRED } from 'App/constants'; interface Props { editHandler: (isTitle: boolean) => void; @@ -16,7 +17,7 @@ function DashboardOptions(props: Props) { { icon: 'text-paragraph', text: `${!isTitlePresent ? 'Add' : 'Edit'} Description`, onClick: () => editHandler(false) }, { icon: 'users', text: 'Visibility & Access', onClick: editHandler }, { icon: 'trash', text: 'Delete', onClick: deleteHandler }, - { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: 'This feature requires an enterprise license.' } + { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: {ENTERPRISE_REQUEIRED} } ] return ( diff --git a/frontend/app/components/Dashboard/components/MetricTypeItem/MetricTypeItem.tsx b/frontend/app/components/Dashboard/components/MetricTypeItem/MetricTypeItem.tsx index 0d5fc4c89..c28389c4a 100644 --- a/frontend/app/components/Dashboard/components/MetricTypeItem/MetricTypeItem.tsx +++ b/frontend/app/components/Dashboard/components/MetricTypeItem/MetricTypeItem.tsx @@ -2,6 +2,7 @@ import { IconNames } from 'App/components/ui/SVG'; import React from 'react'; import { Icon, Tooltip } from 'UI'; import cn from 'classnames'; +import { ENTERPRISE_REQUEIRED } from 'App/constants'; export interface MetricType { title: string; @@ -23,7 +24,7 @@ function MetricTypeItem(props: Props) { onClick = () => {}, } = props; return ( - +
{authDetails.edition === 'ee' ? "SSO has not been configured. Please reach out to your admin." : "This feature requires an enterprise license."}
} + title={
{authDetails.edition === 'ee' ? "SSO has not been configured. Please reach out to your admin." : ENTERPRISE_REQUEIRED}
} placement="top" > - {webhook.exists() && } -
- {webhook.exists() && - } -
- +
+
+ + {webhook.exists() && }
+ {webhook.exists() && ( + )}
- ); + +
+ ); } export default observer(WebhookForm); diff --git a/frontend/app/mstore/settingsStore.ts b/frontend/app/mstore/settingsStore.ts index 7dd584fd3..c31071694 100644 --- a/frontend/app/mstore/settingsStore.ts +++ b/frontend/app/mstore/settingsStore.ts @@ -6,7 +6,6 @@ import Webhook, { IWebhook } from 'Types/webhook'; import { webhookService } from 'App/services'; -import Alert, { IAlert } from "Types/alert"; export default class SettingsStore { loadingCaptureRate: boolean = false; @@ -73,8 +72,11 @@ export default class SettingsStore { this.webhookInst = new Webhook(data) if (inst.webhookId === undefined) this.setWebhooks([...this.webhooks, this.webhookInst]) else this.setWebhooks([...this.webhooks.filter(hook => hook.webhookId !== data.webhookId), this.webhookInst]) - this.hooksLoading = false + }) + .finally(() => { + this.hooksLoading = false + }) } setWebhooks = (webhooks: Webhook[]) => { diff --git a/frontend/app/services/WebhookService.ts b/frontend/app/services/WebhookService.ts index 2bcefa619..7b1073867 100644 --- a/frontend/app/services/WebhookService.ts +++ b/frontend/app/services/WebhookService.ts @@ -6,20 +6,17 @@ export default class WebhookService extends BaseService { return this.client.get('/webhooks') .then(r => r.json()) .then(j => j.data || []) - .catch(Promise.reject) } saveWebhook(inst: Webhook) { return this.client.put('/webhooks', inst) .then(r => r.json()) .then(j => j.data || {}) - .catch(Promise.reject) } removeWebhook(id: Webhook["webhookId"]) { return this.client.delete('/webhooks/' + id) .then(r => r.json()) .then(j => j.data || {}) - .catch(Promise.reject) } } \ No newline at end of file From 4c4ffc2bcaad3777f3db0b95faf464a7a6365c30 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 14:25:03 +0100 Subject: [PATCH 094/218] fix(ui): remove consolelog --- .../shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx index 6efbb506d..c6495227b 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx @@ -62,7 +62,6 @@ function FetchTabs({ resource }: Props) { const [requestHeaders, setRequestHeaders] = useState | null>(null); const [responseHeaders, setResponseHeaders] = useState | null>(null); - console.log(resource) useEffect(() => { const { request, response } = resource; parseRequestResponse( From 5bb9a8e73adc5063225ee39c4a0e56629c8ee5b5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 22 Feb 2023 17:11:34 +0100 Subject: [PATCH 095/218] feat(chalice): fixed insights --- ee/api/chalicelib/core/sessions_insights.py | 32 +++++++++++---------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py index 9f05e9b11..532e47e19 100644 --- a/ee/api/chalicelib/core/sessions_insights.py +++ b/ee/api/chalicelib/core/sessions_insights.py @@ -173,7 +173,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional if n == n_: data_['value'] = v[0] data_['oldValue'] = v[1] - data_['change'] = 100* v[2] + data_['change'] = 100 * v[2] data_['isNew'] = False break results.append(data_) @@ -252,12 +252,12 @@ def query_most_errors_by_period(project_id, start_time, end_time, for n in 
common_errors: if n is None: continue - old_errors = _sum_table_index(_table_where(table_hh2, names_idx, n), names_idx) - if old_errors == 0: + sum_old_errors = _sum_table_index(_table_where(table_hh2, names_idx, n), sessions_idx) + if sum_old_errors == 0: continue - new_errors = _sum_table_index(_table_where(table_hh1, names_idx, n), names_idx) + sum_new_errors = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) # error_increase[n] = (new_errors - old_errors) / old_errors - error_values[n] = new_errors, old_errors, (new_errors - old_errors) / old_errors + error_values[n] = sum_new_errors, sum_old_errors, (sum_new_errors - sum_old_errors) / sum_old_errors ratio = sorted(percentage_errors.items(), key=lambda k: k[1], reverse=True) increase = sorted(error_values.items(), key=lambda k: k[1][-1], reverse=True) names_ = set([k[0] for k in increase[:3] + ratio[:3]] + new_errors[:3]) @@ -347,18 +347,20 @@ def query_cpu_memory_by_period(project_id, start_time, end_time, output = list() if cpu_oldvalue is not None or cpu_newvalue is not None: output.append({'category': schemas_ee.InsightCategories.resources, - 'name': 'cpu', - 'value': cpu_newvalue, - 'oldValue': cpu_oldvalue, - 'change': 100 * (cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio, - 'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False}) + 'name': 'cpu', + 'value': cpu_newvalue, + 'oldValue': cpu_oldvalue, + 'change': 100 * ( + cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio, + 'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False}) if mem_oldvalue is not None or mem_newvalue is not None: output.append({'category': schemas_ee.InsightCategories.resources, - 'name': 'memory', - 'value': mem_newvalue, - 'oldValue': mem_oldvalue, - 'change': 100 * (mem_newvalue - mem_oldvalue) / mem_oldvalue if mem_ratio is not None else mem_ratio, - 'isNew': True if mem_newvalue is not None and mem_oldvalue is None else False}) + 'name': 'memory', + 'value': mem_newvalue, + 'oldValue': mem_oldvalue, + 'change': 100 * ( + mem_newvalue - mem_oldvalue) / mem_oldvalue if mem_ratio is not None else mem_ratio, + 'isNew': True if mem_newvalue is not None and mem_oldvalue is None else False}) return output From c0f2602c17beb2c8e4c68ea7bb6386a01830e3f8 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 10:15:42 +0100 Subject: [PATCH 096/218] change(ui): fix for network tabs? 
--- .../FetchDetailsModal/components/FetchTabs/FetchTabs.tsx | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx index 63415ed26..6efbb506d 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx @@ -22,7 +22,7 @@ function parseRequestResponse( setStringBody(''); return; } - let json = JSON.parse(r) + const json = JSON.parse(r) const hs = json.headers const bd = json.body as string @@ -35,11 +35,8 @@ function parseRequestResponse( setJSONBody(null) setStringBody('') } - if (typeof bd !== 'string') { - throw new Error(`body is not a string`) - } try { - let jBody = JSON.parse(bd) + const jBody = JSON.parse(bd) if (typeof jBody === "object" && jBody != null) { setJSONBody(jBody) } else { From 203f0131b43bfc633ca6592c7f5e56aa52ccc7ff Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 12:25:15 +0100 Subject: [PATCH 097/218] fix(ui): display error for webhooks --- .../components/Client/Webhooks/WebhookForm.js | 133 ++++++++++-------- frontend/app/mstore/settingsStore.ts | 6 +- frontend/app/services/WebhookService.ts | 3 - 3 files changed, 76 insertions(+), 66 deletions(-) diff --git a/frontend/app/components/Client/Webhooks/WebhookForm.js b/frontend/app/components/Client/Webhooks/WebhookForm.js index 62f009f1e..08799456f 100644 --- a/frontend/app/components/Client/Webhooks/WebhookForm.js +++ b/frontend/app/components/Client/Webhooks/WebhookForm.js @@ -1,75 +1,86 @@ import React from 'react'; import { Form, Button, Input } from 'UI'; import styles from './webhookForm.module.css'; -import { useStore } from 'App/mstore' -import { observer } from 'mobx-react-lite' +import { useStore } from 'App/mstore'; +import { observer } from 'mobx-react-lite'; +import { toast } from 'react-toastify'; function WebhookForm(props) { - const { settingsStore } = useStore() - const { webhookInst: webhook, hooksLoading: loading, saveWebhook, editWebhook } = settingsStore - const write = ({ target: { value, name } }) => editWebhook({ [name]: value }); + const { settingsStore } = useStore(); + const { webhookInst: webhook, hooksLoading: loading, saveWebhook, editWebhook } = settingsStore; + const write = ({ target: { value, name } }) => editWebhook({ [name]: value }); - const save = () => { - saveWebhook(webhook).then(() => { - props.onClose(); - }); - }; + const save = () => { + saveWebhook(webhook) + .then(() => { + props.onClose(); + }) + .catch((e) => { + const baseStr = 'Error saving webhook'; + if (e.response) { + e.response.json().then(({ errors }) => { + toast.error(baseStr + ': ' + errors.join(',')); + }); + } else { + toast.error(baseStr); + } + }); + }; + return ( +
+

{webhook.exists() ? 'Update' : 'Add'} Webhook

+
+ + + + - return ( -
-

{webhook.exists() ? 'Update' : 'Add'} Webhook

- - - - - + + + + - - - - + + + + - - - - - -
-
- - {webhook.exists() && } -
- {webhook.exists() && - } -
- +
+
+ + {webhook.exists() && } +
+ {webhook.exists() && ( + + )}
- ); + +
+ ); } export default observer(WebhookForm); diff --git a/frontend/app/mstore/settingsStore.ts b/frontend/app/mstore/settingsStore.ts index 7dd584fd3..c31071694 100644 --- a/frontend/app/mstore/settingsStore.ts +++ b/frontend/app/mstore/settingsStore.ts @@ -6,7 +6,6 @@ import Webhook, { IWebhook } from 'Types/webhook'; import { webhookService } from 'App/services'; -import Alert, { IAlert } from "Types/alert"; export default class SettingsStore { loadingCaptureRate: boolean = false; @@ -73,8 +72,11 @@ export default class SettingsStore { this.webhookInst = new Webhook(data) if (inst.webhookId === undefined) this.setWebhooks([...this.webhooks, this.webhookInst]) else this.setWebhooks([...this.webhooks.filter(hook => hook.webhookId !== data.webhookId), this.webhookInst]) - this.hooksLoading = false + }) + .finally(() => { + this.hooksLoading = false + }) } setWebhooks = (webhooks: Webhook[]) => { diff --git a/frontend/app/services/WebhookService.ts b/frontend/app/services/WebhookService.ts index 2bcefa619..7b1073867 100644 --- a/frontend/app/services/WebhookService.ts +++ b/frontend/app/services/WebhookService.ts @@ -6,20 +6,17 @@ export default class WebhookService extends BaseService { return this.client.get('/webhooks') .then(r => r.json()) .then(j => j.data || []) - .catch(Promise.reject) } saveWebhook(inst: Webhook) { return this.client.put('/webhooks', inst) .then(r => r.json()) .then(j => j.data || {}) - .catch(Promise.reject) } removeWebhook(id: Webhook["webhookId"]) { return this.client.delete('/webhooks/' + id) .then(r => r.json()) .then(j => j.data || {}) - .catch(Promise.reject) } } \ No newline at end of file From 9459f4b679702ac4538d64dc80796ac0cbf83e9e Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 14:25:03 +0100 Subject: [PATCH 098/218] fix(ui): remove consolelog --- .../shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx index 6efbb506d..c6495227b 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchTabs/FetchTabs.tsx @@ -62,7 +62,6 @@ function FetchTabs({ resource }: Props) { const [requestHeaders, setRequestHeaders] = useState | null>(null); const [responseHeaders, setResponseHeaders] = useState | null>(null); - console.log(resource) useEffect(() => { const { request, response } = resource; parseRequestResponse( From b2f4795745c9518fe97e56475262b6ff851cbd83 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 17:18:17 +0100 Subject: [PATCH 099/218] fix(ui): fix form update --- frontend/app/mstore/alertsStore.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index a2d155ffc..245be0bcf 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -79,10 +79,10 @@ export default class AlertsStore { edit = (diff: Partial) => { const key = Object.keys(diff)[0] - const oldInst = { ...this.instance } + const oldInst = this.instance // @ts-ignore oldInst[key] = diff[key] - this.instance = new Alert(oldInst, !!oldInst.alertId) + this.instance = oldInst } } From 6e16aacb56bccf0fed30dc79aad32e0cab4faa1e Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 17:54:03 +0100 
Subject: [PATCH 100/218] fix(ui): fix alert unit change

---
 .../components/Alerts/AlertForm/Condition.tsx |  4 +-
 .../components/Alerts/AlertListItem.tsx       |  3 +-
 .../Dashboard/components/Alerts/NewAlert.tsx  |  5 ++
 frontend/app/mstore/alertsStore.ts            | 76 ++++++++++---------
 4 files changed, 50 insertions(+), 38 deletions(-)

diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx
index ba6956323..80a900895 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx
@@ -26,6 +26,7 @@ interface ICondition {
   writeQuery: (data: any) => void;
   writeQueryOption: (e: any, data: any) => void;
   unit: any;
+  changeUnit: (value: string) => void;
 }

 function Condition({
@@ -36,6 +37,7 @@ function Condition({
   writeQueryOption,
   writeQuery,
   unit,
+  changeUnit,
 }: ICondition) {
   return (
diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index 071dd204c..8137b7750 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -8,6 +8,7 @@ import { DateTime } from 'luxon'; import { withRouter, RouteComponentProps } from 'react-router-dom'; import cn from 'classnames'; import Alert from 'Types/alert'; +import { observer } from 'mobx-react-lite' const getThreshold = (threshold: number) => { if (threshold === 15) return '15 Minutes'; @@ -165,4 +166,4 @@ function AlertListItem(props: Props) { ); } -export default withRouter(AlertListItem); +export default withRouter(observer(AlertListItem)); diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index 67a6bb459..4d1d247b0 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -167,6 +167,10 @@ const NewAlert = (props: IProps) => { edit({ query: { ...query, [name]: value } }); }; + const changeUnit = (value: string) => { + alertsStore.changeUnit(value) + } + const writeQuery = ({ target: { value, name } }: React.ChangeEvent) => { const { query } = instance; edit({ query: { ...query, [name]: value } }); @@ -243,6 +247,7 @@ const NewAlert = (props: IProps) => { instance={instance} triggerOptions={triggerOptions} writeQueryOption={writeQueryOption} + changeUnit={changeUnit} writeQuery={writeQuery} unit={unit} /> diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index 245be0bcf..33665f861 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -1,14 +1,14 @@ -import { makeAutoObservable } from 'mobx' -import Alert, { IAlert } from 'Types/alert' -import { alertsService } from 'App/services' +import { makeAutoObservable, action } from 'mobx'; +import Alert, { IAlert } from 'Types/alert'; +import { alertsService } from 'App/services'; export default class AlertsStore { alerts: Alert[] = []; - triggerOptions: { label: string, value: string | number, unit?: string }[] = []; + triggerOptions: { label: string; value: string | number; unit?: string }[] = []; alertsSearch = ''; - // @ts-ignore + // @ts-ignore instance: Alert = new Alert({}, false); - loading = false + loading = false; page: number = 1; constructor() { @@ -18,71 +18,75 @@ export default class AlertsStore { changeSearch = (value: string) => { this.alertsSearch = value; this.page = 1; - } + }; // TODO: remove it updateKey(key: string, value: any) { // @ts-ignore - this[key] = value + this[key] = value; } fetchList = async () => { - this.loading = true + this.loading = true; try { const list = await alertsService.fetchList(); - this.alerts = list.map(alert => new Alert(alert, true)); + this.alerts = list.map((alert) => new Alert(alert, true)); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; save = async (inst: Alert) => { - this.loading = true + this.loading = true; try { - await alertsService.save(inst ? inst : this.instance) - this.instance.isExists = true + await alertsService.save(inst ? 
inst : this.instance); + this.instance.isExists = true; } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; remove = async (id: string) => { - this.loading = true + this.loading = true; try { - await alertsService.remove(id) + await alertsService.remove(id); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; fetchTriggerOptions = async () => { - this.loading = true + this.loading = true; try { const options = await alertsService.fetchTriggerOptions(); - this.triggerOptions = options.map(({ name, value }) => ({ label: name, value })) + this.triggerOptions = options.map(({ name, value }) => ({ label: name, value })); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; init = (inst: Partial | Alert) => { - this.instance = inst instanceof Alert ? inst : new Alert(inst, false) - } + this.instance = inst instanceof Alert ? inst : new Alert(inst, false); + }; edit = (diff: Partial) => { - const key = Object.keys(diff)[0] - const oldInst = this.instance + const key = Object.keys(diff)[0]; + const oldInst = this.instance; // @ts-ignore - oldInst[key] = diff[key] + oldInst[key] = diff[key]; - this.instance = oldInst - } + this.instance = oldInst; + }; + + changeUnit = ({ value }: { value: string }) => { + this.instance.change = value; + }; } From 791ccaa82b8496ef8a2f805225df7ff5e7072aac Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 23 Feb 2023 11:03:47 +0100 Subject: [PATCH 101/218] fix(ui) - tooltip text --- .../Dashboard/components/DashboardOptions/DashboardOptions.tsx | 2 +- frontend/app/components/ui/ItemMenu/ItemMenu.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx index b006dbb22..63b1c3f35 100644 --- a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx +++ b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx @@ -17,7 +17,7 @@ function DashboardOptions(props: Props) { { icon: 'text-paragraph', text: `${!isTitlePresent ? 'Add' : 'Edit'} Description`, onClick: () => editHandler(false) }, { icon: 'users', text: 'Visibility & Access', onClick: editHandler }, { icon: 'trash', text: 'Delete', onClick: deleteHandler }, - { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: {ENTERPRISE_REQUEIRED} } + { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: ENTERPRISE_REQUEIRED } ] return ( diff --git a/frontend/app/components/ui/ItemMenu/ItemMenu.tsx b/frontend/app/components/ui/ItemMenu/ItemMenu.tsx index fcb7e6467..bd8ecee28 100644 --- a/frontend/app/components/ui/ItemMenu/ItemMenu.tsx +++ b/frontend/app/components/ui/ItemMenu/ItemMenu.tsx @@ -68,7 +68,7 @@ export default class ItemMenu extends React.PureComponent { {items .filter(({ hidden }) => !hidden) .map(({ onClick, text, icon, disabled = false, tooltipTitle = '' }) => ( - +
{}} From 69c2f3f291ce38530ca7a2eb7017a407993321dd Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Feb 2023 11:55:56 +0100 Subject: [PATCH 102/218] change(tracker): 5.0.0 release --- tracker/tracker/CHANGELOG.md | 3 ++- tracker/tracker/package.json | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index 9d12152f6..559e4e865 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -1,10 +1,11 @@ -## 4.1.10 +## 5.0.0 - Added "tel" to supported input types - Added `{ withCurrentTime: true }` to `tracker.getSessionURL` method which will return sessionURL with current session's timestamp - Added Network module that captures fetch/xhr by default (with no plugin required) - Use `timeOrigin()` instead of `performance.timing.navigationStart` in ResourceTiming messages - Added app restart when service worker died after inactivity (mobile safari) +- **[breaking]** string dictionary to reduce session size ## 4.1.8 diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 5682ffc1c..c45c15e4a 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.10", + "version": "5.0.0", "keywords": [ "logging", "replay" From 9f19800fa1792c3d09060cd28a768261a8cbf50b Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Feb 2023 11:58:16 +0100 Subject: [PATCH 103/218] change(tracker): assist 5.0.0 --- tracker/tracker-assist/CHANGELOG.md | 2 +- tracker/tracker-assist/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tracker/tracker-assist/CHANGELOG.md b/tracker/tracker-assist/CHANGELOG.md index 7ce338e9b..f61aad123 100644 --- a/tracker/tracker-assist/CHANGELOG.md +++ b/tracker/tracker-assist/CHANGELOG.md @@ -1,4 +1,4 @@ -## 4.1.6 +## 5.0.0 - fix recording state import diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index 8b9dcd8ef..aaa80429d 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "4.1.6", + "version": "5.0.0", "keywords": [ "WebRTC", "assistance", From fc4c7704da592f39a2caf6e8b3f39afc74c1d5d7 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Thu, 23 Feb 2023 08:18:31 -0500 Subject: [PATCH 104/218] Updated tracker minVersion --- frontend/.env.sample | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/.env.sample b/frontend/.env.sample index 88f79bdb6..4b6cface2 100644 --- a/frontend/.env.sample +++ b/frontend/.env.sample @@ -23,4 +23,4 @@ MINIO_SECRET_KEY = '' # APP and TRACKER VERSIONS VERSION = '1.10.0' -TRACKER_VERSION = '4.1.10' +TRACKER_VERSION = '5.0.0' From a7062ad00baae7da42aa8665e69082b949f51657 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Thu, 23 Feb 2023 08:18:54 -0500 Subject: [PATCH 105/218] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 363c64d1c..05608a3c1 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ OpenReplay is a session replay suite you can host yourself, that lets you see what users do on your web app, helping you troubleshoot issues faster. It's the only open-source alternative to products such as FullStory and LogRocket. 
- **Session replay.** OpenReplay replays what users do, but not only. It also shows you what went under the hood, how your website or app behaves by capturing network activity, console logs, JS errors, store actions/state, page speed metrics, cpu/memory usage and much more. -- **Low footprint**. With a ~18KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance. +- **Low footprint**. With a ~19KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance. - **Self-hosted**. No more security compliance checks, 3rd-parties processing user data. Everything OpenReplay captures stays in your cloud for a complete control over your data. - **Privacy controls**. Fine-grained security features for sanitizing user data. - **Easy deploy**. With support of major public cloud providers (AWS, GCP, Azure, DigitalOcean). From 15eb5d53a12156c57fd9ce5daabb6f24fae3b6a8 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Thu, 23 Feb 2023 14:20:50 +0100 Subject: [PATCH 106/218] feat(backend): upgraded /x/text library --- backend/go.mod | 2 +- backend/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 161513ed8..9633f2b18 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -62,7 +62,7 @@ require ( golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect golang.org/x/sys v0.1.0 // indirect - golang.org/x/text v0.4.0 // indirect + golang.org/x/text v0.7.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect diff --git a/backend/go.sum b/backend/go.sum index de6d507d3..676cf479b 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -715,8 +715,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From 3bfa7573e91b1495e6657857079268cd3713f339 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 23 Feb 2023 16:01:47 +0100 Subject: [PATCH 107/218] fix(helm): fix chalice pg hardcoded port Signed-off-by: rjshrjndrn --- .../openreplay/charts/chalice/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index 586b43293..29d311a25 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ 
-66,7 +66,7 @@ spec: - name: pg_host value: '{{ .Values.global.postgresql.postgresqlHost }}' - name: pg_port - value: "5432" + value: '{{ .Values.global.postgresql.postgresqlPort }}' - name: pg_dbname value: "{{ .Values.global.postgresql.postgresqlDatabase }}" - name: pg_user From 87d842ba43439930731a5f692211e88529f5333d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 23 Feb 2023 17:08:11 +0100 Subject: [PATCH 108/218] feat(chalice): cleaned code --- api/chalicelib/core/users.py | 8 -------- ee/api/chalicelib/core/users.py | 13 ------------- 2 files changed, 21 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index e5ae6e72b..c4933f92c 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -514,14 +514,6 @@ def set_password_invitation(user_id, new_password): } -def count_members(): - with pg_client.PostgresClient() as cur: - cur.execute("""SELECT COUNT(user_id) - FROM public.users WHERE deleted_at IS NULL;""") - r = cur.fetchone() - return r["count"] - - def email_exists(email): with pg_client.PostgresClient() as cur: cur.execute( diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index d2b13535a..ff357113f 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -591,19 +591,6 @@ def set_password_invitation(tenant_id, user_id, new_password): } -def count_members(tenant_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """SELECT - COUNT(user_id) - FROM public.users WHERE tenant_id = %(tenant_id)s AND deleted_at IS NULL;""", - {"tenant_id": tenant_id}) - ) - r = cur.fetchone() - return r["count"] - - def email_exists(email): with pg_client.PostgresClient() as cur: cur.execute( From 4e8cb33727f7e9eace9fdffb3f3e0f15e66c0fa5 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 17:18:17 +0100 Subject: [PATCH 109/218] fix(ui): fix form update --- frontend/app/mstore/alertsStore.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index a2d155ffc..245be0bcf 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -79,10 +79,10 @@ export default class AlertsStore { edit = (diff: Partial) => { const key = Object.keys(diff)[0] - const oldInst = { ...this.instance } + const oldInst = this.instance // @ts-ignore oldInst[key] = diff[key] - this.instance = new Alert(oldInst, !!oldInst.alertId) + this.instance = oldInst } } From 5693896a99c8820ce1a8968aefff6bc949e50ed8 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 22 Feb 2023 17:54:03 +0100 Subject: [PATCH 110/218] fix(ui): fix alert unit change --- .../components/Alerts/AlertForm/Condition.tsx | 4 +- .../components/Alerts/AlertListItem.tsx | 3 +- .../Dashboard/components/Alerts/NewAlert.tsx | 5 ++ frontend/app/mstore/alertsStore.ts | 76 ++++++++++--------- 4 files changed, 50 insertions(+), 38 deletions(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx index ba6956323..80a900895 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertForm/Condition.tsx @@ -26,6 +26,7 @@ interface ICondition { writeQuery: (data: any) => void; writeQueryOption: (e: any, data: any) => void; unit: any; + changeUnit: (value: string) => void; } function 
Condition({ @@ -36,6 +37,7 @@ function Condition({ writeQueryOption, writeQuery, unit, + changeUnit, }: ICondition) { return (
@@ -48,7 +50,7 @@ function Condition({ options={changeOptions} name="change" defaultValue={instance.change} - onChange={({ value }) => writeOption(null, { name: 'change', value })} + onChange={({ value }) => changeUnit(value)} id="change-dropdown" />
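Patches 109 and 110 above switch alert edits from rebuilding the Alert instance to mutating it in place inside the MobX store. A condensed sketch of that pattern, assuming a plain observable class (names mirror alertsStore but are trimmed for illustration; the AlertListItem change just below adds the matching observer wrapper):

    import React from 'react';
    import { makeAutoObservable } from 'mobx';
    import { observer } from 'mobx-react-lite';

    class AlertsStore {
      // stand-in for the Alert model, reduced to the field used here
      instance = { change: 'change' };

      constructor() {
        makeAutoObservable(this); // fields -> observables, methods -> actions
      }

      // Mutating the existing observable keeps object identity (and any
      // unsaved edits) intact, unlike the old edit() which rebuilt the
      // instance; observers are still notified of the field change.
      changeUnit = (value: string) => {
        this.instance.change = value;
      };
    }

    export const alertsStore = new AlertsStore();

    // Consumers re-render on the mutation only if observer-wrapped:
    export const UnitLabel = observer(() => <span>{alertsStore.instance.change}</span>);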
diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index 071dd204c..8137b7750 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -8,6 +8,7 @@ import { DateTime } from 'luxon'; import { withRouter, RouteComponentProps } from 'react-router-dom'; import cn from 'classnames'; import Alert from 'Types/alert'; +import { observer } from 'mobx-react-lite' const getThreshold = (threshold: number) => { if (threshold === 15) return '15 Minutes'; @@ -165,4 +166,4 @@ function AlertListItem(props: Props) { ); } -export default withRouter(AlertListItem); +export default withRouter(observer(AlertListItem)); diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index 67a6bb459..4d1d247b0 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -167,6 +167,10 @@ const NewAlert = (props: IProps) => { edit({ query: { ...query, [name]: value } }); }; + const changeUnit = (value: string) => { + alertsStore.changeUnit(value) + } + const writeQuery = ({ target: { value, name } }: React.ChangeEvent) => { const { query } = instance; edit({ query: { ...query, [name]: value } }); @@ -243,6 +247,7 @@ const NewAlert = (props: IProps) => { instance={instance} triggerOptions={triggerOptions} writeQueryOption={writeQueryOption} + changeUnit={changeUnit} writeQuery={writeQuery} unit={unit} /> diff --git a/frontend/app/mstore/alertsStore.ts b/frontend/app/mstore/alertsStore.ts index 245be0bcf..33665f861 100644 --- a/frontend/app/mstore/alertsStore.ts +++ b/frontend/app/mstore/alertsStore.ts @@ -1,14 +1,14 @@ -import { makeAutoObservable } from 'mobx' -import Alert, { IAlert } from 'Types/alert' -import { alertsService } from 'App/services' +import { makeAutoObservable, action } from 'mobx'; +import Alert, { IAlert } from 'Types/alert'; +import { alertsService } from 'App/services'; export default class AlertsStore { alerts: Alert[] = []; - triggerOptions: { label: string, value: string | number, unit?: string }[] = []; + triggerOptions: { label: string; value: string | number; unit?: string }[] = []; alertsSearch = ''; - // @ts-ignore + // @ts-ignore instance: Alert = new Alert({}, false); - loading = false + loading = false; page: number = 1; constructor() { @@ -18,71 +18,75 @@ export default class AlertsStore { changeSearch = (value: string) => { this.alertsSearch = value; this.page = 1; - } + }; // TODO: remove it updateKey(key: string, value: any) { // @ts-ignore - this[key] = value + this[key] = value; } fetchList = async () => { - this.loading = true + this.loading = true; try { const list = await alertsService.fetchList(); - this.alerts = list.map(alert => new Alert(alert, true)); + this.alerts = list.map((alert) => new Alert(alert, true)); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; save = async (inst: Alert) => { - this.loading = true + this.loading = true; try { - await alertsService.save(inst ? inst : this.instance) - this.instance.isExists = true + await alertsService.save(inst ? 
inst : this.instance); + this.instance.isExists = true; } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; remove = async (id: string) => { - this.loading = true + this.loading = true; try { - await alertsService.remove(id) + await alertsService.remove(id); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; fetchTriggerOptions = async () => { - this.loading = true + this.loading = true; try { const options = await alertsService.fetchTriggerOptions(); - this.triggerOptions = options.map(({ name, value }) => ({ label: name, value })) + this.triggerOptions = options.map(({ name, value }) => ({ label: name, value })); } catch (e) { - console.error(e) + console.error(e); } finally { - this.loading = false + this.loading = false; } - } + }; init = (inst: Partial | Alert) => { - this.instance = inst instanceof Alert ? inst : new Alert(inst, false) - } + this.instance = inst instanceof Alert ? inst : new Alert(inst, false); + }; edit = (diff: Partial) => { - const key = Object.keys(diff)[0] - const oldInst = this.instance + const key = Object.keys(diff)[0]; + const oldInst = this.instance; // @ts-ignore - oldInst[key] = diff[key] + oldInst[key] = diff[key]; - this.instance = oldInst - } + this.instance = oldInst; + }; + + changeUnit = ({ value }: { value: string }) => { + this.instance.change = value; + }; } From 728917f0d6c2bb590c5afab694046778e853dfa6 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 23 Feb 2023 11:03:47 +0100 Subject: [PATCH 111/218] fix(ui) - tooltip text --- .../Dashboard/components/DashboardOptions/DashboardOptions.tsx | 2 +- frontend/app/components/ui/ItemMenu/ItemMenu.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx index b006dbb22..63b1c3f35 100644 --- a/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx +++ b/frontend/app/components/Dashboard/components/DashboardOptions/DashboardOptions.tsx @@ -17,7 +17,7 @@ function DashboardOptions(props: Props) { { icon: 'text-paragraph', text: `${!isTitlePresent ? 'Add' : 'Edit'} Description`, onClick: () => editHandler(false) }, { icon: 'users', text: 'Visibility & Access', onClick: editHandler }, { icon: 'trash', text: 'Delete', onClick: deleteHandler }, - { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: {ENTERPRISE_REQUEIRED} } + { icon: 'pdf-download', text: 'Download Report', onClick: renderReport, disabled: !isEnterprise, tooltipTitle: ENTERPRISE_REQUEIRED } ] return ( diff --git a/frontend/app/components/ui/ItemMenu/ItemMenu.tsx b/frontend/app/components/ui/ItemMenu/ItemMenu.tsx index fcb7e6467..bd8ecee28 100644 --- a/frontend/app/components/ui/ItemMenu/ItemMenu.tsx +++ b/frontend/app/components/ui/ItemMenu/ItemMenu.tsx @@ -68,7 +68,7 @@ export default class ItemMenu extends React.PureComponent { {items .filter(({ hidden }) => !hidden) .map(({ onClick, text, icon, disabled = false, tooltipTitle = '' }) => ( - +
{}} From db482a8ddd462a66efebce714f0614182e5fabee Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Feb 2023 11:55:56 +0100 Subject: [PATCH 112/218] change(tracker): 5.0.0 release --- tracker/tracker/CHANGELOG.md | 3 ++- tracker/tracker/package.json | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index 9d12152f6..559e4e865 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -1,10 +1,11 @@ -## 4.1.10 +## 5.0.0 - Added "tel" to supported input types - Added `{ withCurrentTime: true }` to `tracker.getSessionURL` method which will return sessionURL with current session's timestamp - Added Network module that captures fetch/xhr by default (with no plugin required) - Use `timeOrigin()` instead of `performance.timing.navigationStart` in ResourceTiming messages - Added app restart when service worker died after inactivity (mobile safari) +- **[breaking]** string dictionary to reduce session size ## 4.1.8 diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 5682ffc1c..c45c15e4a 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.10", + "version": "5.0.0", "keywords": [ "logging", "replay" From f3e0293d7775467108f6e3d401720eee8254153f Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Feb 2023 11:58:16 +0100 Subject: [PATCH 113/218] change(tracker): assist 5.0.0 --- tracker/tracker-assist/CHANGELOG.md | 2 +- tracker/tracker-assist/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tracker/tracker-assist/CHANGELOG.md b/tracker/tracker-assist/CHANGELOG.md index 7ce338e9b..f61aad123 100644 --- a/tracker/tracker-assist/CHANGELOG.md +++ b/tracker/tracker-assist/CHANGELOG.md @@ -1,4 +1,4 @@ -## 4.1.6 +## 5.0.0 - fix recording state import diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index 8b9dcd8ef..aaa80429d 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "4.1.6", + "version": "5.0.0", "keywords": [ "WebRTC", "assistance", From dbc644826986c2df95ef1939bba29d2efc8e8ff9 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Thu, 23 Feb 2023 08:18:31 -0500 Subject: [PATCH 114/218] Updated tracker minVersion --- frontend/.env.sample | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/.env.sample b/frontend/.env.sample index 88f79bdb6..4b6cface2 100644 --- a/frontend/.env.sample +++ b/frontend/.env.sample @@ -23,4 +23,4 @@ MINIO_SECRET_KEY = '' # APP and TRACKER VERSIONS VERSION = '1.10.0' -TRACKER_VERSION = '4.1.10' +TRACKER_VERSION = '5.0.0' From dc3a5bd875ee0f2f7344b5d3aa34c2be00dbb865 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Thu, 23 Feb 2023 08:18:54 -0500 Subject: [PATCH 115/218] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 363c64d1c..05608a3c1 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ OpenReplay is a session replay suite you can host yourself, that lets you see what users do on your web app, helping you troubleshoot issues faster. It's the only open-source alternative to products such as FullStory and LogRocket. 
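Patch 112 above (like patch 102) notes that 5.0.0 captures fetch/XHR through a built-in Network module, with no plugin required. A sketch of narrowing that capture at tracker construction; the shape of the `network` options block is an assumption about the 5.x API, not something this patch set defines:

    import Tracker from '@openreplay/tracker';

    const tracker = new Tracker({
      projectKey: 'YOUR_PROJECT_KEY', // hypothetical placeholder
      // Assumed 5.x option names: capture is on by default, and a block
      // like this would restrict what lands in the replay.
      network: {
        capturePayload: false, // keep request/response bodies out
        failuresOnly: false,   // record all requests, not just failures
      },
    });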
- **Session replay.** OpenReplay replays what users do, but not only. It also shows you what went under the hood, how your website or app behaves by capturing network activity, console logs, JS errors, store actions/state, page speed metrics, cpu/memory usage and much more. -- **Low footprint**. With a ~18KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance. +- **Low footprint**. With a ~19KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance. - **Self-hosted**. No more security compliance checks, 3rd-parties processing user data. Everything OpenReplay captures stays in your cloud for a complete control over your data. - **Privacy controls**. Fine-grained security features for sanitizing user data. - **Easy deploy**. With support of major public cloud providers (AWS, GCP, Azure, DigitalOcean). From 6af59e64481421e1b47b1dc960f9140ac6784745 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Thu, 23 Feb 2023 14:20:50 +0100 Subject: [PATCH 116/218] feat(backend): upgraded /x/text library --- backend/go.mod | 2 +- backend/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 161513ed8..9633f2b18 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -62,7 +62,7 @@ require ( golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect golang.org/x/sys v0.1.0 // indirect - golang.org/x/text v0.4.0 // indirect + golang.org/x/text v0.7.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect diff --git a/backend/go.sum b/backend/go.sum index de6d507d3..676cf479b 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -715,8 +715,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From c9789ed99ab6312d1f0c29e8ed67db7db1f37790 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 23 Feb 2023 16:01:47 +0100 Subject: [PATCH 117/218] fix(helm): fix chalice pg hardcoded port Signed-off-by: rjshrjndrn --- .../openreplay/charts/chalice/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index 586b43293..29d311a25 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ 
-66,7 +66,7 @@ spec: - name: pg_host value: '{{ .Values.global.postgresql.postgresqlHost }}' - name: pg_port - value: "5432" + value: '{{ .Values.global.postgresql.postgresqlPort }}' - name: pg_dbname value: "{{ .Values.global.postgresql.postgresqlDatabase }}" - name: pg_user From 3f42f7b9e782113f80da393559a32d1adcdb0021 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 23 Feb 2023 18:08:09 +0100 Subject: [PATCH 118/218] chore(helm): Adding support for global env variables --- .../charts/alerts/templates/deployment.yaml | 4 +++ .../charts/assets/templates/deployment.yaml | 4 +++ .../charts/assist/templates/deployment.yaml | 4 +++ .../charts/chalice/templates/deployment.yaml | 4 +++ .../charts/db/templates/deployment.yaml | 4 +++ .../charts/ender/templates/deployment.yaml | 4 +++ .../charts/frontend/templates/deployment.yaml | 4 +++ .../heuristics/templates/deployment.yaml | 4 +++ .../charts/http/templates/deployment.yaml | 4 +++ .../integrations/templates/deployment.yaml | 4 +++ .../charts/peers/templates/deployment.yaml | 4 +++ .../charts/quickwit/templates/deployment.yaml | 4 +++ .../charts/sink/templates/deployment.yaml | 4 +++ .../sourcemapreader/templates/deployment.yaml | 4 +++ .../charts/storage/templates/deployment.yaml | 4 +++ .../helmcharts/openreplay/templates/job.yaml | 28 +++++++++++++++++++ scripts/helmcharts/openreplay/values.yaml | 2 ++ 17 files changed, 90 insertions(+) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index 4afed4367..d4c1d6e49 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -116,6 +116,10 @@ spec: value: '{{ .Values.global.email.emailSslCert }}' - name: EMAIL_FROM value: '{{ .Values.global.email.emailFrom }}' + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index f66479475..f959adc13 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -94,6 +94,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index e153e50c3..92ae9a93c 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -75,6 +75,10 @@ spec: {{- end }} - name: REDIS_URL value: {{ .Values.global.redis.redisHost }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml 
b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index 29d311a25..a15553a8a 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ -134,6 +134,10 @@ spec: value: '{{ .Values.global.email.emailSslCert }}' - name: EMAIL_FROM value: '{{ .Values.global.email.emailFrom }}' + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml index 63182fbac..90e971c8d 100644 --- a/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml @@ -69,6 +69,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ .Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml index e5b0a946b..fec4a808d 100644 --- a/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml @@ -61,6 +61,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ .Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml index e5eb29441..f685b76bc 100644 --- a/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml @@ -101,6 +101,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml index 6d88fec7a..f545ff77f 100644 --- a/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml @@ -50,6 +50,10 @@ spec: - name: KAFKA_USE_SSL value: '{{ .Values.global.kafka.kafkaUseSsl }}' {{- include "openreplay.env.redis_string" 
.Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 9f7d407bb..1add28054 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -101,6 +101,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml index 0f9ead73c..522316d81 100644 --- a/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml @@ -61,6 +61,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ .Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml index 2cbd395d9..98c290708 100644 --- a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml @@ -54,6 +54,10 @@ spec: {{- else }} value: {{ .Values.global.s3.accessKey }} {{- end }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml index 3ac58c215..34c9ddd73 100644 --- a/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml @@ -57,6 +57,10 @@ spec: value: {{ .Values.global.s3.secretKey }} - name: QW_DATA_DIR value: /opt/openreplay/ + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} ports: {{- range $key, $val := .Values.service.ports }} - name: {{ $key }} diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 7381541a1..88bd89c1f 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -70,6 +70,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include 
"openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml index 7abca821c..1d8041c5b 100644 --- a/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml @@ -79,6 +79,10 @@ spec: # S3 compatible storage value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml index 9cb2cca22..aff40a227 100644 --- a/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml @@ -78,6 +78,10 @@ spec: - name: KAFKA_USE_SSL value: '{{ .Values.global.kafka.kafkaUseSsl }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/templates/job.yaml b/scripts/helmcharts/openreplay/templates/job.yaml index 095232a7d..3e0494d7f 100644 --- a/scripts/helmcharts/openreplay/templates/job.yaml +++ b/scripts/helmcharts/openreplay/templates/job.yaml @@ -35,6 +35,10 @@ spec: - name: git image: alpine/git env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: ENTERPRISE_EDITION_LICENSE value: "{{ .Values.global.enterpriseEditionLicense }}" command: @@ -107,6 +111,10 @@ spec: {{- else }} value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} image: bitnami/postgresql:13.3.0-debian-10-r53 command: - /bin/bash @@ -122,6 +130,10 @@ spec: - name: minio image: bitnami/minio:2020.10.9-debian-10-r6 env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: UPGRADE_FRONTENT @@ -152,6 +164,10 @@ spec: {{- if .Values.vault.enabled }} - name: vault env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PGHOST @@ -177,6 +193,10 @@ spec: mountPath: /opt/migrations/ - name: vault-s3-upload env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: AWS_ACCESS_KEY_ID value: "{{ .Values.global.s3.accessKey }}" - name: AWS_SECRET_ACCESS_KEY @@ -221,6 +241,10 @@ spec: - name: clickhouse image: clickhouse/clickhouse-server:22.12-alpine env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: 
PREVIOUS_APP_VERSION @@ -248,6 +272,10 @@ spec: - name: kafka image: bitnami/kafka:2.6.0-debian-10-r30 env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: RETENTION_TIME value: "{{ .Values.global.kafka.retentionTime }}" - name: KAFKA_HOST diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index f601d22da..694585180 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -37,3 +37,5 @@ global: vault: *vault redis: *redis clusterDomain: "svc.cluster.local" + # In case you've http proxy to access internet. + env: {} From 13b4fc1c12bf510d0d8cb67953066734e4514a59 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 13:50:44 +0100 Subject: [PATCH 119/218] feat(DB): backup data before upgrade --- .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 145 ++++++++++++++++- .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 147 +++++++++++++++++- 2 files changed, 289 insertions(+), 3 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index d31b3cec7..6461a1214 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -5,6 +5,148 @@ $$ SELECT 'v1.10.0-ee' $$ LANGUAGE sql IMMUTABLE; +-- Backup dashboard & search data: +DO +$$ + BEGIN + IF NOT (SELECT EXISTS(SELECT schema_name + FROM information_schema.schemata + WHERE schema_name = 'backup_v1_10_0')) THEN + CREATE SCHEMA backup_v1_10_0; + CREATE TABLE backup_v1_10_0.dashboards + ( + dashboard_id integer, + project_id integer, + user_id integer, + name text NOT NULL, + description text NOT NULL DEFAULT '', + is_public boolean NOT NULL DEFAULT TRUE, + is_pinned boolean NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL + ); + CREATE TABLE backup_v1_10_0.dashboard_widgets + ( + widget_id integer, + dashboard_id integer, + metric_id integer, + user_id integer, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + config jsonb NOT NULL DEFAULT '{}'::jsonb + ); + CREATE TABLE backup_v1_10_0.searches + ( + search_id integer, + project_id integer, + user_id integer, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + CREATE TABLE backup_v1_10_0.metrics + ( + metric_id integer, + project_id integer, + user_id integer, + name text NOT NULL, + is_public boolean NOT NULL DEFAULT FALSE, + active boolean NOT NULL DEFAULT TRUE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp, + edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + metric_type text NOT NULL, + view_type text NOT NULL, + metric_of text NOT NULL DEFAULT 'sessionCount', + metric_value text[] NOT NULL DEFAULT '{}'::text[], + metric_format text, + category text NULL DEFAULT 'custom', + is_pinned boolean NOT NULL DEFAULT FALSE, + is_predefined boolean NOT NULL DEFAULT FALSE, + is_template boolean NOT NULL DEFAULT FALSE, + predefined_key text NULL DEFAULT NULL, + default_config jsonb NOT NULL + ); + CREATE TABLE backup_v1_10_0.metric_series + ( + series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + metric_id integer REFERENCES metrics (metric_id) ON DELETE 
CASCADE, + index integer NOT NULL, + name text NULL, + filter jsonb NOT NULL, + created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL, + deleted_at timestamp + ); + + INSERT INTO backup_v1_10_0.dashboards(dashboard_id, project_id, user_id, name, description, is_public, + is_pinned, + created_at, deleted_at) + SELECT dashboard_id, + project_id, + user_id, + name, + description, + is_public, + is_pinned, + created_at, + deleted_at + FROM public.dashboards + ORDER BY dashboard_id; + + INSERT INTO backup_v1_10_0.metrics(metric_id, project_id, user_id, name, is_public, active, created_at, + deleted_at, edited_at, metric_type, view_type, metric_of, metric_value, + metric_format, category, is_pinned, is_predefined, is_template, + predefined_key, default_config) + SELECT metric_id, + project_id, + user_id, + name, + is_public, + active, + created_at, + deleted_at, + edited_at, + metric_type, + view_type, + metric_of, + metric_value, + metric_format, + category, + is_pinned, + is_predefined, + is_template, + predefined_key, + default_config + FROM public.metrics + ORDER BY metric_id; + + INSERT INTO backup_v1_10_0.metric_series(series_id, metric_id, index, name, filter, created_at, deleted_at) + SELECT series_id, metric_id, index, name, filter, created_at, deleted_at + FROM public.metric_series + ORDER BY series_id; + + INSERT INTO backup_v1_10_0.dashboard_widgets(widget_id, dashboard_id, metric_id, user_id, created_at, config) + SELECT widget_id, dashboard_id, metric_id, user_id, created_at, config + FROM public.dashboard_widgets + ORDER BY widget_id; + + INSERT INTO backup_v1_10_0.searches(search_id, project_id, user_id, name, filter, created_at, deleted_at, + is_public) + SELECT search_id, + project_id, + user_id, + name, + filter, + created_at, + deleted_at, + is_public + FROM public.searches + ORDER BY search_id; + END IF; + END +$$ LANGUAGE plpgsql; + CREATE TABLE IF NOT EXISTS frontend_signals ( project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, @@ -119,7 +261,8 @@ $$ and column_name = 'is_predefined') THEN -- 0. change metric_of UPDATE metrics - SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''),left(metric_of, 1) || right(replace(initcap(metric_of), '_', ''), -1)) + SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''), + left(metric_of, 1) || right(replace(initcap(metric_of), '_', ''), -1)) WHERE not is_predefined; -- 1. 
pre transform structure diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 23ac42d37..92c0964bb 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -5,6 +5,148 @@ $$ SELECT 'v1.10.0' $$ LANGUAGE sql IMMUTABLE; +-- Backup dashboard & search data: +DO +$$ + BEGIN + IF NOT (SELECT EXISTS(SELECT schema_name + FROM information_schema.schemata + WHERE schema_name = 'backup_v1_10_0')) THEN + CREATE SCHEMA backup_v1_10_0; + CREATE TABLE backup_v1_10_0.dashboards + ( + dashboard_id integer, + project_id integer, + user_id integer, + name text NOT NULL, + description text NOT NULL DEFAULT '', + is_public boolean NOT NULL DEFAULT TRUE, + is_pinned boolean NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL + ); + CREATE TABLE backup_v1_10_0.dashboard_widgets + ( + widget_id integer, + dashboard_id integer, + metric_id integer, + user_id integer, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + config jsonb NOT NULL DEFAULT '{}'::jsonb + ); + CREATE TABLE backup_v1_10_0.searches + ( + search_id integer, + project_id integer, + user_id integer, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False + ); + CREATE TABLE backup_v1_10_0.metrics + ( + metric_id integer, + project_id integer, + user_id integer, + name text NOT NULL, + is_public boolean NOT NULL DEFAULT FALSE, + active boolean NOT NULL DEFAULT TRUE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp, + edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + metric_type text NOT NULL, + view_type text NOT NULL, + metric_of text NOT NULL DEFAULT 'sessionCount', + metric_value text[] NOT NULL DEFAULT '{}'::text[], + metric_format text, + category text NULL DEFAULT 'custom', + is_pinned boolean NOT NULL DEFAULT FALSE, + is_predefined boolean NOT NULL DEFAULT FALSE, + is_template boolean NOT NULL DEFAULT FALSE, + predefined_key text NULL DEFAULT NULL, + default_config jsonb NOT NULL + ); + CREATE TABLE backup_v1_10_0.metric_series + ( + series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE, + index integer NOT NULL, + name text NULL, + filter jsonb NOT NULL, + created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL, + deleted_at timestamp + ); + + INSERT INTO backup_v1_10_0.dashboards(dashboard_id, project_id, user_id, name, description, is_public, + is_pinned, + created_at, deleted_at) + SELECT dashboard_id, + project_id, + user_id, + name, + description, + is_public, + is_pinned, + created_at, + deleted_at + FROM public.dashboards + ORDER BY dashboard_id; + + INSERT INTO backup_v1_10_0.metrics(metric_id, project_id, user_id, name, is_public, active, created_at, + deleted_at, edited_at, metric_type, view_type, metric_of, metric_value, + metric_format, category, is_pinned, is_predefined, is_template, + predefined_key, default_config) + SELECT metric_id, + project_id, + user_id, + name, + is_public, + active, + created_at, + deleted_at, + edited_at, + metric_type, + view_type, + metric_of, + metric_value, + metric_format, + category, + is_pinned, + is_predefined, + is_template, + predefined_key, + default_config + 
FROM public.metrics + ORDER BY metric_id; + + INSERT INTO backup_v1_10_0.metric_series(series_id, metric_id, index, name, filter, created_at, deleted_at) + SELECT series_id, metric_id, index, name, filter, created_at, deleted_at + FROM public.metric_series + ORDER BY series_id; + + INSERT INTO backup_v1_10_0.dashboard_widgets(widget_id, dashboard_id, metric_id, user_id, created_at, config) + SELECT widget_id, dashboard_id, metric_id, user_id, created_at, config + FROM public.dashboard_widgets + ORDER BY widget_id; + + INSERT INTO backup_v1_10_0.searches(search_id, project_id, user_id, name, filter, created_at, deleted_at, + is_public) + SELECT search_id, + project_id, + user_id, + name, + filter, + created_at, + deleted_at, + is_public + FROM public.searches + ORDER BY search_id; + END IF; + END +$$ LANGUAGE plpgsql; + ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams'; UPDATE metrics @@ -93,7 +235,8 @@ $$ and column_name = 'is_predefined') THEN -- 0. change metric_of UPDATE metrics - SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''),left(metric_of, 1) || right(replace(initcap(metric_of), '_', ''), -1)) + SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''), + left(metric_of, 1) || right(replace(initcap(metric_of), '_', ''), -1)) WHERE not is_predefined; -- 1. pre transform structure @@ -482,4 +625,4 @@ COMMIT; CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector); CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_idx ON events.clicks (path); CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_gin_idx ON events.clicks USING GIN (path gin_trgm_ops); -CREATE INDEX CONCURRENTLY IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); \ No newline at end of file +CREATE INDEX CONCURRENTLY IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id); From 58bb783c0c982fd55a25c352b8b5b9f1606332c9 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 23 Feb 2023 18:08:09 +0100 Subject: [PATCH 120/218] chore(helm): Adding support for global env variables --- .../charts/alerts/templates/deployment.yaml | 4 +++ .../charts/assets/templates/deployment.yaml | 4 +++ .../charts/assist/templates/deployment.yaml | 4 +++ .../charts/chalice/templates/deployment.yaml | 4 +++ .../charts/db/templates/deployment.yaml | 4 +++ .../charts/ender/templates/deployment.yaml | 4 +++ .../charts/frontend/templates/deployment.yaml | 4 +++ .../heuristics/templates/deployment.yaml | 4 +++ .../charts/http/templates/deployment.yaml | 4 +++ .../integrations/templates/deployment.yaml | 4 +++ .../charts/peers/templates/deployment.yaml | 4 +++ .../charts/quickwit/templates/deployment.yaml | 4 +++ .../charts/sink/templates/deployment.yaml | 4 +++ .../sourcemapreader/templates/deployment.yaml | 4 +++ .../charts/storage/templates/deployment.yaml | 4 +++ .../helmcharts/openreplay/templates/job.yaml | 28 +++++++++++++++++++ scripts/helmcharts/openreplay/values.yaml | 2 ++ 17 files changed, 90 insertions(+) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index 4afed4367..d4c1d6e49 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -116,6 +116,10 @@ spec: value: '{{ .Values.global.email.emailSslCert }}' - name: EMAIL_FROM value: '{{ .Values.global.email.emailFrom }}' + {{- range $key, $val := 
.Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index f66479475..f959adc13 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -94,6 +94,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index e153e50c3..92ae9a93c 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -75,6 +75,10 @@ spec: {{- end }} - name: REDIS_URL value: {{ .Values.global.redis.redisHost }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index 29d311a25..a15553a8a 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ -134,6 +134,10 @@ spec: value: '{{ .Values.global.email.emailSslCert }}' - name: EMAIL_FROM value: '{{ .Values.global.email.emailFrom }}' + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml index 63182fbac..90e971c8d 100644 --- a/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/db/templates/deployment.yaml @@ -69,6 +69,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ .Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml index e5b0a946b..fec4a808d 100644 --- a/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/ender/templates/deployment.yaml @@ -61,6 +61,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ 
.Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml index e5eb29441..f685b76bc 100644 --- a/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/frontend/templates/deployment.yaml @@ -101,6 +101,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml index 6d88fec7a..f545ff77f 100644 --- a/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/heuristics/templates/deployment.yaml @@ -50,6 +50,10 @@ spec: - name: KAFKA_USE_SSL value: '{{ .Values.global.kafka.kafkaUseSsl }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 9f7d407bb..1add28054 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -101,6 +101,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml index 0f9ead73c..522316d81 100644 --- a/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/integrations/templates/deployment.yaml @@ -61,6 +61,10 @@ spec: - name: POSTGRES_STRING value: 'postgres://{{ .Values.global.postgresql.postgresqlUser }}:$(pg_password)@{{ .Values.global.postgresql.postgresqlHost }}:{{ .Values.global.postgresql.postgresqlPort }}/{{ .Values.global.postgresql.postgresqlDatabase }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml 
b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml index 2cbd395d9..98c290708 100644 --- a/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/peers/templates/deployment.yaml @@ -54,6 +54,10 @@ spec: {{- else }} value: {{ .Values.global.s3.accessKey }} {{- end }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml index 3ac58c215..34c9ddd73 100644 --- a/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/quickwit/templates/deployment.yaml @@ -57,6 +57,10 @@ spec: value: {{ .Values.global.s3.secretKey }} - name: QW_DATA_DIR value: /opt/openreplay/ + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} ports: {{- range $key, $val := .Values.service.ports }} - name: {{ $key }} diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 7381541a1..88bd89c1f 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -70,6 +70,10 @@ spec: value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml index 7abca821c..1d8041c5b 100644 --- a/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sourcemapreader/templates/deployment.yaml @@ -79,6 +79,10 @@ spec: # S3 compatible storage value: '{{ .Values.global.s3.endpoint }}/{{.Values.global.s3.assetsBucket}}' {{- end }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml index 9cb2cca22..aff40a227 100644 --- a/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/storage/templates/deployment.yaml @@ -78,6 +78,10 @@ spec: - name: KAFKA_USE_SSL value: '{{ .Values.global.kafka.kafkaUseSsl }}' {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.env }} - name: {{ $key }} value: '{{ $val }}' diff --git a/scripts/helmcharts/openreplay/templates/job.yaml b/scripts/helmcharts/openreplay/templates/job.yaml index 095232a7d..3e0494d7f 100644 --- a/scripts/helmcharts/openreplay/templates/job.yaml +++ b/scripts/helmcharts/openreplay/templates/job.yaml @@ -35,6 +35,10 @@ spec: - 
name: git image: alpine/git env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: ENTERPRISE_EDITION_LICENSE value: "{{ .Values.global.enterpriseEditionLicense }}" command: @@ -107,6 +111,10 @@ spec: {{- else }} value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} image: bitnami/postgresql:13.3.0-debian-10-r53 command: - /bin/bash @@ -122,6 +130,10 @@ spec: - name: minio image: bitnami/minio:2020.10.9-debian-10-r6 env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: UPGRADE_FRONTENT @@ -152,6 +164,10 @@ spec: {{- if .Values.vault.enabled }} - name: vault env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PGHOST @@ -177,6 +193,10 @@ spec: mountPath: /opt/migrations/ - name: vault-s3-upload env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: AWS_ACCESS_KEY_ID value: "{{ .Values.global.s3.accessKey }}" - name: AWS_SECRET_ACCESS_KEY @@ -221,6 +241,10 @@ spec: - name: clickhouse image: clickhouse/clickhouse-server:22.12-alpine env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PREVIOUS_APP_VERSION @@ -248,6 +272,10 @@ spec: - name: kafka image: bitnami/kafka:2.6.0-debian-10-r30 env: + {{- range $key, $val := .Values.global.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: RETENTION_TIME value: "{{ .Values.global.kafka.retentionTime }}" - name: KAFKA_HOST diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index f601d22da..694585180 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -37,3 +37,5 @@ global: vault: *vault redis: *redis clusterDomain: "svc.cluster.local" + # In case you've http proxy to access internet. 
+ env: {} From 594385290a834885d44e8356d87133a7d68df7e0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 15:12:58 +0100 Subject: [PATCH 121/218] feat(DB): added missing column --- ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql | 1 + scripts/schema/db/init_dbs/postgresql/init_schema.sql | 1 + 2 files changed, 2 insertions(+) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index c9cc4f87d..0b2945b39 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -747,6 +747,7 @@ $$ metric_value text[] NOT NULL DEFAULT '{}'::text[], metric_format text, thumbnail text, + is_pinned boolean NOT NULL DEFAULT FALSE, default_config jsonb NOT NULL DEFAULT '{ "col": 2, "row": 2, diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 350c5cbdb..57dea2a58 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -854,6 +854,7 @@ $$ metric_value text[] NOT NULL DEFAULT '{}'::text[], metric_format text, thumbnail text, + is_pinned boolean NOT NULL DEFAULT FALSE, default_config jsonb NOT NULL DEFAULT '{ "col": 2, "row": 2, From 15ad4313fce0764705c0dd50717c3e4d8d2dc99a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 15:22:18 +0100 Subject: [PATCH 122/218] feat(chalice): changed update dashboard response --- api/chalicelib/core/dashboards.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 93c2b8675..10143a5a4 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -114,17 +114,19 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo row = cur.fetchone() offset = row["count"] pg_query = f"""UPDATE dashboards - SET name = %(name)s, + SET name = %(name)s, description= %(description)s {", is_public = %(is_public)s" if data.is_public is not None else ""} {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""} - WHERE dashboards.project_id = %(projectId)s + WHERE dashboards.project_id = %(projectId)s AND dashboard_id = %(dashboard_id)s - AND (dashboards.user_id = %(userId)s OR is_public)""" + AND (dashboards.user_id = %(userId)s OR is_public) + RETURNING dashboard_id,name,description,is_public,createdAt;""" if data.metrics is not None and len(data.metrics) > 0: pg_query = f"""WITH dash AS ({pg_query}) - INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) - VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};""" + INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) + VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])} + RETURNING dash.*;""" for i, m in enumerate(data.metrics): params[f"metric_id_{i}"] = m # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \ @@ -134,8 +136,9 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo params[f"config_{i}"] = json.dumps({"position": i + offset}) 
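        # (Sketch of the statement assembled above, not part of the patch: with
        # widgets attached, the final SQL has the shape
        #
        #   WITH dash AS (
        #     UPDATE dashboards SET ...
        #     RETURNING dashboard_id, name, description, is_public, created_at
        #   )
        #   INSERT INTO dashboard_widgets (dashboard_id, metric_id, user_id, config)
        #   VALUES (...), (...) RETURNING dash.*;
        #
        # so the endpoint can answer with the persisted row instead of a bare
        # {"success": True}. One caveat worth verifying: PostgreSQL's RETURNING
        # list on an INSERT may only reference columns of the rows being
        # inserted, so returning the CTE's columns would conventionally be
        # written by putting the INSERT in its own CTE and closing with
        # `SELECT * FROM dash;`.)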
cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() - return {"success": True} + return helper.dict_to_camel_case(row) def get_widget(project_id, user_id, dashboard_id, widget_id): From 54fa17dc3f2eccba1270b97e4a95a30f8bd0d3e5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 15:31:57 +0100 Subject: [PATCH 123/218] feat(chalice): changed update dashboard response --- api/chalicelib/core/dashboards.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 10143a5a4..89f56176b 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -121,7 +121,7 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo WHERE dashboards.project_id = %(projectId)s AND dashboard_id = %(dashboard_id)s AND (dashboards.user_id = %(userId)s OR is_public) - RETURNING dashboard_id,name,description,is_public,createdAt;""" + RETURNING dashboard_id,name,description,is_public,created_at;""" if data.metrics is not None and len(data.metrics) > 0: pg_query = f"""WITH dash AS ({pg_query}) INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) @@ -137,7 +137,8 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() - + if row: + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) return helper.dict_to_camel_case(row) From 0aa5dbb4ac84e5e7a7e65988b61e8d99620068cc Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 24 Feb 2023 15:59:16 +0100 Subject: [PATCH 124/218] change(ui): remove additional calls to api after dashb update --- frontend/app/mstore/dashboardStore.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/frontend/app/mstore/dashboardStore.ts b/frontend/app/mstore/dashboardStore.ts index bd8681c2f..5d96173ff 100644 --- a/frontend/app/mstore/dashboardStore.ts +++ b/frontend/app/mstore/dashboardStore.ts @@ -189,19 +189,19 @@ export default class DashboardStore { return new Promise((resolve, reject) => { dashboardService .saveDashboard(dashboard) - .then((_dashboard) => { + .then((_dashboard: any) => { runInAction(() => { if (isCreating) { toast.success('Dashboard created successfully'); this.addDashboard(new Dashboard().fromJson(_dashboard)); } else { toast.success('Dashboard successfully updated '); - this.updateDashboard(new Dashboard().fromJson(_dashboard)); + this.syncDashboardInfo(_dashboard.dashboardId!, _dashboard); } resolve(_dashboard); }); }) - .catch((error) => { + .catch(() => { toast.error('Error saving dashboard'); reject(); }) @@ -213,6 +213,14 @@ export default class DashboardStore { }); } + syncDashboardInfo(id: string, info: { name: string, description: string, isPublic: boolean, createdAt: number }) { + if (this.selectedDashboard !== null) { + this.selectedDashboard.update(info) + const index = this.dashboards.findIndex((d) => d.dashboardId === id); + Object.assign(this.dashboards[index], info) + } + } + saveMetric(metric: Widget, dashboardId: string): Promise { const isCreating = !metric.widgetId; return dashboardService.saveMetric(metric, dashboardId).then((metric) => { From fb5ba3e932750e196152fd9cc89a99b6f425ed1c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 17:42:57 +0100 Subject: [PATCH 125/218] feat(assist): changed image --- utilities/Dockerfile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git 
a/utilities/Dockerfile b/utilities/Dockerfile index 8f4d98549..84b54c906 100644 --- a/utilities/Dockerfile +++ b/utilities/Dockerfile @@ -1,14 +1,11 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" -ARG GIT_SHA -LABEL GIT_SHA=$GIT_SHA -RUN apk add --no-cache tini git libc6-compat && ln -s /lib/libc.musl-x86_64.so.1 /lib/ld-linux-x86-64.so.2 +RUN apk add --no-cache tini ARG envarg ENV ENTERPRISE_BUILD=${envarg} \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ PRIVATE_ENDPOINTS=false \ - GIT_SHA=$GIT_SHA \ LISTEN_PORT=9001 WORKDIR /work COPY package.json . From 91286ad76c07eb5b902aec3cf623e6783f2ce885 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 17:58:35 +0100 Subject: [PATCH 126/218] chore(actions): changes feat(assist): changed dependencies --- .github/workflows/assist-ee.yaml | 2 +- .github/workflows/assist.yaml | 2 +- ee/utilities/package-lock.json | 24 ++++++++++++------------ utilities/package-lock.json | 24 ++++++++++++------------ 4 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index 78a783dd1..fc237d371 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -6,7 +6,7 @@ on: - dev paths: - "ee/utilities/**" - - "utilities/*/**" + - "utilities/**" - "!utilities/.gitignore" - "!utilities/*-dev.sh" diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index c599d5cbd..65ca0348c 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -5,7 +5,7 @@ on: branches: - dev paths: - - "utilities/*/**" + - "utilities/**" - "!utilities/.gitignore" - "!utilities/*-dev.sh" diff --git a/ee/utilities/package-lock.json b/ee/utilities/package-lock.json index c90edb001..1d74677cf 100644 --- a/ee/utilities/package-lock.json +++ b/ee/utilities/package-lock.json @@ -1,11 +1,11 @@ { - "name": "utilities-server", + "name": "assist-server", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "utilities-server", + "name": "assist-server", "version": "1.0.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { @@ -117,9 +117,9 @@ } }, "node_modules/@types/node": { - "version": "18.13.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", - "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + "version": "18.14.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", + "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -355,9 +355,9 @@ } }, "node_modules/engine.io": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.0.tgz", - "integrity": "sha512-OgxY1c/RuCSeO/rTr8DIFXx76IzUUft86R7/P7MMbbkuzeqJoTNw2lmeD91IyGz41QYleIIjWeMJGgug043sfQ==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.1.tgz", + "integrity": "sha512-JFYQurD/nbsA5BSPmbaOSLa3tSVj8L6o4srSwXXY3NqE+gGUNmmPTbhn8tjzcCtSqhFgIeqef81ngny8JM25hw==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -1002,14 +1002,14 @@ } }, "node_modules/socket.io": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.6.0.tgz", - "integrity": "sha512-b65bp6INPk/BMMrIgVvX12x3Q+NqlGqSlTuvKQWt0BUJ3Hyy3JangBl7fEoWZTXbOKlCqNPbQ6MbWgok/km28w==", + "version": "4.6.1", + "resolved": 
"https://registry.npmjs.org/socket.io/-/socket.io-4.6.1.tgz", + "integrity": "sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.4.0", + "engine.io": "~6.4.1", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.1" }, diff --git a/utilities/package-lock.json b/utilities/package-lock.json index 91cb862d2..aba9e43fe 100644 --- a/utilities/package-lock.json +++ b/utilities/package-lock.json @@ -1,11 +1,11 @@ { - "name": "utilities-server", + "name": "assist-server", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "utilities-server", + "name": "assist-server", "version": "1.0.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { @@ -45,9 +45,9 @@ } }, "node_modules/@types/node": { - "version": "18.13.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", - "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + "version": "18.14.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", + "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" }, "node_modules/accepts": { "version": "1.3.8", @@ -254,9 +254,9 @@ } }, "node_modules/engine.io": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.0.tgz", - "integrity": "sha512-OgxY1c/RuCSeO/rTr8DIFXx76IzUUft86R7/P7MMbbkuzeqJoTNw2lmeD91IyGz41QYleIIjWeMJGgug043sfQ==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.1.tgz", + "integrity": "sha512-JFYQurD/nbsA5BSPmbaOSLa3tSVj8L6o4srSwXXY3NqE+gGUNmmPTbhn8tjzcCtSqhFgIeqef81ngny8JM25hw==", "dependencies": { "@types/cookie": "^0.4.1", "@types/cors": "^2.8.12", @@ -862,14 +862,14 @@ } }, "node_modules/socket.io": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.6.0.tgz", - "integrity": "sha512-b65bp6INPk/BMMrIgVvX12x3Q+NqlGqSlTuvKQWt0BUJ3Hyy3JangBl7fEoWZTXbOKlCqNPbQ6MbWgok/km28w==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.6.1.tgz", + "integrity": "sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==", "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "debug": "~4.3.2", - "engine.io": "~6.4.0", + "engine.io": "~6.4.1", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.1" }, From ea8cf98beb3016a0f30b61322c96e9206ee35a4d Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 24 Feb 2023 17:59:45 +0100 Subject: [PATCH 127/218] feat(frontend): use ResourceTiming from file instead of database --- .../Session_/Network/Network.DEPRECATED.js | 148 ------------------ .../Session_/Network/NetworkContent.js | 14 +- .../app/components/Session_/Network/index.js | 2 - .../DevTools/NetworkPanel/NetworkPanel.tsx | 18 +-- .../FetchDetailsModal/FetchDetailsModal.tsx | 4 +- frontend/app/player/web/MessageManager.ts | 20 +-- frontend/app/player/web/WebPlayer.ts | 3 +- frontend/app/player/web/types.ts | 47 ------ frontend/app/player/web/types/index.ts | 2 + frontend/app/player/web/types/log.ts | 23 +++ frontend/app/player/web/types/resource.ts | 114 ++++++++++++++ frontend/app/types/session/resource.ts | 103 ------------ frontend/app/types/session/session.ts | 14 -- .../app/types/synthetics/domBuildingTime.js | 14 -- 
frontend/app/types/synthetics/index.js | 27 ---- frontend/app/utils/index.ts | 7 - 16 files changed, 165 insertions(+), 395 deletions(-) delete mode 100644 frontend/app/components/Session_/Network/Network.DEPRECATED.js delete mode 100644 frontend/app/components/Session_/Network/index.js delete mode 100644 frontend/app/player/web/types.ts create mode 100644 frontend/app/player/web/types/index.ts create mode 100644 frontend/app/player/web/types/log.ts create mode 100644 frontend/app/player/web/types/resource.ts delete mode 100644 frontend/app/types/session/resource.ts delete mode 100644 frontend/app/types/synthetics/domBuildingTime.js delete mode 100644 frontend/app/types/synthetics/index.js diff --git a/frontend/app/components/Session_/Network/Network.DEPRECATED.js b/frontend/app/components/Session_/Network/Network.DEPRECATED.js deleted file mode 100644 index b79307431..000000000 --- a/frontend/app/components/Session_/Network/Network.DEPRECATED.js +++ /dev/null @@ -1,148 +0,0 @@ -import React from 'react'; -import cn from 'classnames'; -import { connectPlayer, } from 'Player'; -import { Tooltip, TextEllipsis } from 'UI'; -import { getRE } from 'App/utils'; -import { TYPES } from 'Types/session/resource'; -import stl from './network.module.css'; -import NetworkContent from './NetworkContent'; -import { connect } from 'react-redux'; -import { setTimelinePointer } from 'Duck/sessions'; - -const ALL = 'ALL'; -const XHR = 'xhr'; -const JS = 'js'; -const CSS = 'css'; -const IMG = 'img'; -const MEDIA = 'media'; -const OTHER = 'other'; - -const TAB_TO_TYPE_MAP = { - [XHR]: TYPES.XHR, - [JS]: TYPES.JS, - [CSS]: TYPES.CSS, - [IMG]: TYPES.IMG, - [MEDIA]: TYPES.MEDIA, - [OTHER]: TYPES.OTHER, -}; - -export function renderName(r) { - return ( -
-    <Tooltip
-      title={<div>{r.url}</div>}
-    >
-      <TextEllipsis>{r.name}</TextEllipsis>
-    </Tooltip>
-  );
-}
-
-export function renderDuration(r) {
-  if (!r.success) return 'x';
-
-  const text = `${Math.round(r.duration)}ms`;
-  if (!r.isRed && !r.isYellow) return text;
-
-  let tooltipText;
-  let className = 'w-full h-full flex items-center ';
-  if (r.isYellow) {
-    tooltipText = 'Slower than average';
-    className += 'warn color-orange';
-  } else {
-    tooltipText = 'Much slower than average';
-    className += 'error color-red';
-  }
-
-  return (
-    <Tooltip title={tooltipText}>
-      <div className={className}>{text}</div>
-    </Tooltip>
- ); -} - -@connectPlayer((state) => ({ - location: state.location, - resources: state.resourceList, - domContentLoadedTime: state.domContentLoadedTime, - loadTime: state.loadTime, - // time: state.time, - playing: state.playing, - domBuildingTime: state.domBuildingTime, - fetchPresented: state.fetchList.length > 0, - listNow: state.resourceListNow, -})) -@connect( - (state) => ({ - timelinePointer: state.getIn(['sessions', 'timelinePointer']), - }), - { setTimelinePointer } -) -export default class Network extends React.PureComponent { - state = { - filter: '', - filteredList: this.props.resources, - activeTab: ALL, - currentIndex: 0, - }; - - onRowClick = (e, index) => { - // no action for direct click on network requests (so far), there is a jump button, and we don't have more information for than is already displayed in the table - }; - - onTabClick = (activeTab) => this.setState({ activeTab }); - - onFilterChange = (e, { value }) => { - const { resources } = this.props; - const filterRE = getRE(value, 'i'); - const filtered = resources.filter( - ({ type, name }) => - filterRE.test(name) && (activeTab === ALL || type === TAB_TO_TYPE_MAP[activeTab]) - ); - - this.setState({ filter: value, filteredList: value ? filtered : resources, currentIndex: 0 }); - }; - - static getDerivedStateFromProps(nextProps, prevState) { - const { filteredList } = prevState; - if (nextProps.timelinePointer) { - const activeItem = filteredList.find((r) => r.time >= nextProps.timelinePointer.time); - return { - currentIndex: activeItem ? filteredList.indexOf(activeItem) : filteredList.length - 1, - }; - } - } - - render() { - const { location, domContentLoadedTime, loadTime, domBuildingTime, fetchPresented, listNow } = - this.props; - const { filteredList } = this.state; - const resourcesSize = filteredList.reduce( - (sum, { decodedBodySize }) => sum + (decodedBodySize || 0), - 0 - ); - const transferredSize = filteredList.reduce( - (sum, { headerSize, encodedBodySize }) => sum + (headerSize || 0) + (encodedBodySize || 0), - 0 - ); - - return ( - - - - ); - } -} diff --git a/frontend/app/components/Session_/Network/NetworkContent.js b/frontend/app/components/Session_/Network/NetworkContent.js index 5718fb55e..f55f5e407 100644 --- a/frontend/app/components/Session_/Network/NetworkContent.js +++ b/frontend/app/components/Session_/Network/NetworkContent.js @@ -2,7 +2,7 @@ import React from 'react'; import cn from 'classnames'; import { QuestionMarkHint, Tooltip, Tabs, Input, NoContent, Icon, Toggler } from 'UI'; import { getRE } from 'App/utils'; -import { TYPES } from 'Types/session/resource'; +import { ResourceType } from 'Player'; import { formatBytes } from 'App/utils'; import { formatMs } from 'App/date'; @@ -21,12 +21,12 @@ const MEDIA = 'media'; const OTHER = 'other'; const TAB_TO_TYPE_MAP = { - [XHR]: TYPES.XHR, - [JS]: TYPES.JS, - [CSS]: TYPES.CSS, - [IMG]: TYPES.IMG, - [MEDIA]: TYPES.MEDIA, - [OTHER]: TYPES.OTHER, + [XHR]: ResourceType.XHR, + [JS]: ResourceType.SCRIPT, + [CSS]: ResourceType.CSS, + [IMG]: ResourceType.IMG, + [MEDIA]: ResourceType.MEDIA, + [OTHER]: ResourceType.OTHER, }; const TABS = [ALL, XHR, JS, CSS, IMG, MEDIA, OTHER].map((tab) => ({ text: tab, diff --git a/frontend/app/components/Session_/Network/index.js b/frontend/app/components/Session_/Network/index.js deleted file mode 100644 index 446e76ea6..000000000 --- a/frontend/app/components/Session_/Network/index.js +++ /dev/null @@ -1,2 +0,0 @@ -export { default } from './Network'; -export * from './Network'; diff --git 
a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index 9339b1e8b..b7207ddef 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -3,7 +3,7 @@ import { observer } from 'mobx-react-lite'; import { Duration } from 'luxon'; import { Tooltip, Tabs, Input, NoContent, Icon, Toggler } from 'UI'; -import { TYPES } from 'Types/session/resource'; +import { ResourceType } from 'Player'; import { formatBytes } from 'App/utils'; import { formatMs } from 'App/date'; import { useModal } from 'App/components/Modal'; @@ -28,13 +28,13 @@ const MEDIA = 'media'; const OTHER = 'other'; const TYPE_TO_TAB = { - [TYPES.XHR]: XHR, - [TYPES.FETCH]: XHR, - [TYPES.JS]: JS, - [TYPES.CSS]: CSS, - [TYPES.IMG]: IMG, - [TYPES.MEDIA]: MEDIA, - [TYPES.OTHER]: OTHER, + [ResourceType.XHR]: XHR, + [ResourceType.FETCH]: XHR, + [ResourceType.SCRIPT]: JS, + [ResourceType.CSS]: CSS, + [ResourceType.IMG]: IMG, + [ResourceType.MEDIA]: MEDIA, + [ResourceType.OTHER]: OTHER, } const TAP_KEYS = [ALL, XHR, JS, CSS, IMG, MEDIA, OTHER] as const; @@ -154,7 +154,7 @@ function NetworkPanel({ startedAt }: { startedAt: number }) { const activeIndex = devTools[INDEX_KEY].index; const list = useMemo(() => - // TODO: better merge (with body size info) + // TODO: better merge (with body size info) - do it in player resourceList.filter(res => !fetchList.some(ft => { // res.url !== ft.url doesn't work on relative URLs appearing within fetchList (to-fix in player) if (res.name !== ft.name) { return false } diff --git a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx index 49d6536bc..09a08ae16 100644 --- a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx @@ -1,7 +1,7 @@ import React, { useEffect, useState } from 'react'; import FetchBasicDetails from './components/FetchBasicDetails'; import { Button } from 'UI'; -import { TYPES } from 'Types/session/resource'; +import { ResourceType } from 'Player'; import FetchTabs from './components/FetchTabs/FetchTabs'; import { useStore } from 'App/mstore'; import { DateTime } from 'luxon'; @@ -17,7 +17,7 @@ function FetchDetailsModal(props: Props) { const [resource, setResource] = useState(props.resource); const [first, setFirst] = useState(false); const [last, setLast] = useState(false); - const isXHR = resource.type === TYPES.XHR || resource.type === TYPES.FETCH + const isXHR = resource.type === ResourceType.XHR || resource.type === ResourceType.FETCH const { sessionStore: { devTools }, settingsStore: { sessionSettings: { timezone }}, diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 68ef0cbf8..72c07c445 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -2,9 +2,9 @@ import { Decoder } from "syncod"; import logger from 'App/logger'; -import Resource, { TYPES as RES_TYPES } from 'Types/session/resource'; import { TYPES as EVENT_TYPES } from 'Types/session/event'; -import { Log } from './types'; +import { Log } from './types/log'; +import { Resource, ResourceType, getResourceFromResourceTiming, getResourceFromNetworkRequest } from './types/resource' import { toast } from 'react-toastify'; @@ -395,19 +395,13 @@ 
export default class MessageManager { Log(msg) ) break; + case MType.ResourceTiming: + // TODO: merge `resource` and `fetch` lists into one here instead of UI + this.lists.lists.resource.insert(getResourceFromResourceTiming(msg, this.sessionStart)) + break; case MType.Fetch: case MType.NetworkRequest: - this.lists.lists.fetch.insert(new Resource({ - method: msg.method, - url: msg.url, - request: msg.request, - response: msg.response, - status: msg.status, - duration: msg.duration, - type: msg.type === "xhr" ? RES_TYPES.XHR : RES_TYPES.FETCH, - time: Math.max(msg.timestamp - this.sessionStart, 0), // !!! doesn't look good. TODO: find solution to show negative timings - index, - }) as Timed) + this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: decoded = this.decodeStateMessage(msg, ["state", "action"]); diff --git a/frontend/app/player/web/WebPlayer.ts b/frontend/app/player/web/WebPlayer.ts index d94d10beb..d1a56f9fd 100644 --- a/frontend/app/player/web/WebPlayer.ts +++ b/frontend/app/player/web/WebPlayer.ts @@ -1,4 +1,4 @@ -import { Log, LogLevel } from './types' +import { Log, LogLevel } from './types/log' import type { Store } from 'App/player' import Player from '../player/Player' @@ -30,7 +30,6 @@ export default class WebPlayer extends Player { let initialLists = live ? {} : { event: session.events || [], stack: session.stackEvents || [], - resource: session.resources || [], // MBTODO: put ResourceTiming in file exceptions: session.errors?.map(({ name, ...rest }: any) => Log({ level: LogLevel.ERROR, diff --git a/frontend/app/player/web/types.ts b/frontend/app/player/web/types.ts deleted file mode 100644 index 485921ed0..000000000 --- a/frontend/app/player/web/types.ts +++ /dev/null @@ -1,47 +0,0 @@ -export enum LogLevel { - INFO = 'info', - LOG = 'log', - //ASSERT = 'assert', //? - WARN = 'warn', - ERROR = 'error', - EXCEPTION = 'exception', -} - -export interface ILog { - level: LogLevel - value: string - time: number - index?: number - errorId?: string -} - -export const Log = (log: ILog) => ({ - isRed: log.level === LogLevel.EXCEPTION || log.level === LogLevel.ERROR, - isYellow: log.level === LogLevel.WARN, - ...log -}) - - - - -// func getResourceType(initiator string, URL string) string { -// switch initiator { -// case "xmlhttprequest", "fetch": -// return "fetch" -// case "img": -// return "img" -// default: -// switch getURLExtention(URL) { -// case "css": -// return "stylesheet" -// case "js": -// return "script" -// case "png", "gif", "jpg", "jpeg", "svg": -// return "img" -// case "mp4", "mkv", "ogg", "webm", "avi", "mp3": -// return "media" -// default: -// return "other" -// } -// } -// } \ No newline at end of file diff --git a/frontend/app/player/web/types/index.ts b/frontend/app/player/web/types/index.ts new file mode 100644 index 000000000..2f5f8e464 --- /dev/null +++ b/frontend/app/player/web/types/index.ts @@ -0,0 +1,2 @@ +export * from './log' +export * from './resource' \ No newline at end of file diff --git a/frontend/app/player/web/types/log.ts b/frontend/app/player/web/types/log.ts new file mode 100644 index 000000000..22a20d33c --- /dev/null +++ b/frontend/app/player/web/types/log.ts @@ -0,0 +1,23 @@ +export const enum LogLevel { + INFO = 'info', + LOG = 'log', + //ASSERT = 'assert', //? 
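  // (Aside, not in the patch: `const enum` members are inlined by the
  //  TypeScript compiler and erased from the emitted JS -- cheap to ship,
  //  though the values cannot be enumerated at runtime. The Log() factory at
  //  the bottom of this file then precomputes the severity flags once per
  //  entry, e.g.
  //
  //    const entry = Log({ level: LogLevel.ERROR, value: 'boom', time: 1200 })
  //    entry.isRed    // true  -- ERROR and EXCEPTION render red
  //    entry.isYellow // false -- only WARN renders yellow
  //  )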
+ WARN = 'warn', + ERROR = 'error', + EXCEPTION = 'exception', +} + +export interface ILog { + level: LogLevel + value: string + time: number + index?: number + errorId?: string +} + +export const Log = (log: ILog) => ({ + isRed: log.level === LogLevel.EXCEPTION || log.level === LogLevel.ERROR, + isYellow: log.level === LogLevel.WARN, + ...log +}) + diff --git a/frontend/app/player/web/types/resource.ts b/frontend/app/player/web/types/resource.ts new file mode 100644 index 000000000..032790e48 --- /dev/null +++ b/frontend/app/player/web/types/resource.ts @@ -0,0 +1,114 @@ +import type { ResourceTiming, NetworkRequest, Fetch } from '../messages' + +export const enum ResourceType { + XHR = 'xhr', + FETCH = 'fetch', + SCRIPT = 'script', + CSS = 'css', + IMG = 'img', + MEDIA = 'media', + OTHER = 'other', +} + +function getURLExtention(url: string): string { + const pts = url.split(".") + return pts[pts.length-1] || "" +} + +// maybe move this thing to the tracker +function getResourceType(initiator: string, url: string): ResourceType { + switch (initiator) { + case "xmlhttprequest": + case "fetch": + return ResourceType.FETCH + case "img": + return ResourceType.IMG + default: + switch (getURLExtention(url)) { + case "css": + return ResourceType.CSS + case "js": + return ResourceType.SCRIPT + case "png": + case "gif": + case "jpg": + case "jpeg": + case "svg": + return ResourceType.IMG + case "mp4": + case "mkv": + case "ogg": + case "webm": + case "avi": + case "mp3": + return ResourceType.MEDIA + default: + return ResourceType.OTHER + } + } +} + +function getResourceName(url: string) { + return url + .split('/') + .filter((s) => s !== '') + .pop(); +} + + +const YELLOW_BOUND = 10; +const RED_BOUND = 80; + + +interface IResource { + //index: number, + time: number, + type: ResourceType, + url: string, + status: string, + method: string, + duration: number, + success: boolean, + ttfb?: number, + request?: string, + response?: string, + headerSize?: number, + encodedBodySize?: number, + decodedBodySize?: number, + responseBodySize?: number, +} + + +export const Resource = (resource: IResource) => ({ + ...resource, + name: getResourceName(resource.url), + isRed: !resource.success, //|| resource.score >= RED_BOUND, + isYellow: false, // resource.score < RED_BOUND && resource.score >= YELLOW_BOUND, +}) + + +export function getResourceFromResourceTiming(msg: ResourceTiming, sessStart: number) { + const success = msg.duration > 0 // might be duration=0 when cached + const type = getResourceType(msg.initiator, msg.url) + return Resource({ + ...msg, + type, + method: type === ResourceType.FETCH ? ".." : "GET", // should be GET for all non-XHR/Fetch resources, right? + success, + status: success ? '2xx-3xx' : '4xx-5xx', + time: Math.max(0, msg.timestamp - sessStart) + }) +} + +export function getResourceFromNetworkRequest(msg: NetworkRequest | Fetch, sessStart: number) { + return Resource({ + ...msg, + // @ts-ignore + type: msg?.type === "xhr" ? 
ResourceType.XHR : ResourceType.FETCH, + success: msg.status < 400, + status: String(msg.status), + time: Math.max(0, msg.timestamp - sessStart), + }) +} + + diff --git a/frontend/app/types/session/resource.ts b/frontend/app/types/session/resource.ts deleted file mode 100644 index 7e09e8f02..000000000 --- a/frontend/app/types/session/resource.ts +++ /dev/null @@ -1,103 +0,0 @@ -import Record from 'Types/Record'; -import { getResourceName } from 'App/utils'; - -const XHR = 'xhr' as const; -const FETCH = 'fetch' as const; -const JS = 'script' as const; -const CSS = 'css' as const; -const IMG = 'img' as const; -const MEDIA = 'media' as const; -const OTHER = 'other' as const; - -function getResourceStatus(status: number, success: boolean) { - if (status !== undefined) return String(status); - if (typeof success === 'boolean' || typeof success === 'number') { - return !!success - ? '2xx-3xx' - : '4xx-5xx'; - } - return '2xx-3xx'; -} - -export const TYPES = { - XHR, - FETCH, - JS, - CSS, - IMG, - MEDIA, - OTHER, - "stylesheet": CSS, -} - -const YELLOW_BOUND = 10; -const RED_BOUND = 80; - -export function isRed(r: IResource) { - return !r.success || r.score >= RED_BOUND; -} - -interface IResource { - type: keyof typeof TYPES, - url: string, - name: string, - status: number, - duration: number, - index: number, - time: number, - ttfb: number, - timewidth: number, - success: boolean, - score: number, - method: string, - request: string, - response: string, - headerSize: number, - encodedBodySize: number, - decodedBodySize: number, - responseBodySize: number, - timings: Record - datetime: number - timestamp: number -} - -export default class Resource { - name = 'Resource' - type: IResource["type"] - status: string - success: IResource["success"] - time: IResource["time"] - ttfb: IResource["ttfb"] - url: IResource["url"] - duration: IResource["duration"] - index: IResource["index"] - timewidth: IResource["timewidth"] - score: IResource["score"] - method: IResource["method"] - request: IResource["request"] - response: IResource["response"] - headerSize: IResource["headerSize"] - encodedBodySize: IResource["encodedBodySize"] - decodedBodySize: IResource["decodedBodySize"] - responseBodySize: IResource["responseBodySize"] - timings: IResource["timings"] - - constructor({ status, success, time, datetime, timestamp, timings, ...resource }: IResource) { - - // adjusting for 201, 202 etc - const reqSuccess = 300 > status || success - Object.assign(this, { - ...resource, - name: getResourceName(resource.url), - status: getResourceStatus(status, success), - success: reqSuccess, - time: typeof time === 'number' ? 
time : datetime || timestamp, - ttfb: timings && timings.ttfb, - timewidth: timings && timings.timewidth, - timings, - isRed: !reqSuccess || resource.score >= RED_BOUND, - isYellow: resource.score < RED_BOUND && resource.score >= YELLOW_BOUND, - }) - } -} - diff --git a/frontend/app/types/session/session.ts b/frontend/app/types/session/session.ts index 4dcef8d49..3b254ae4b 100644 --- a/frontend/app/types/session/session.ts +++ b/frontend/app/types/session/session.ts @@ -1,7 +1,6 @@ import { Duration } from 'luxon'; import SessionEvent, { TYPES, EventData, InjectedEvent } from './event'; import StackEvent from './stackEvent'; -import Resource from './resource'; import SessionError, { IError } from './error'; import Issue, { IIssue } from './issue'; import { Note } from 'App/services/NotesService' @@ -31,8 +30,6 @@ export interface ISession { duration: number, events: InjectedEvent[], stackEvents: StackEvent[], - resources: Resource[], - missedResources: Resource[], metadata: [], favorite: boolean, filterId?: string, @@ -119,7 +116,6 @@ export default class Session { duration: ISession["duration"] events: ISession["events"] stackEvents: ISession["stackEvents"] - resources: ISession["resources"] metadata: ISession["metadata"] favorite: ISession["favorite"] filterId?: ISession["filterId"] @@ -181,7 +177,6 @@ export default class Session { devtoolsURL = [], mobsUrl = [], notes = [], - resources = [], ...session } = sessionData const duration = Duration.fromMillis(session.duration < 1000 ? 1000 : session.duration); @@ -208,13 +203,6 @@ export default class Session { }) } - let resourcesList = resources.map((r) => new Resource(r as any)); - resourcesList.forEach((r: Resource) => { - r.time = Math.max(0, r.time - startedAt) - }) - resourcesList = resourcesList.sort((r1, r2) => r1.time - r2.time); - const missedResources = resourcesList.filter(({ success }) => !success); - const stackEventsList: StackEvent[] = [] if (stackEvents?.length || session.userEvents?.length) { const mergedArrays = [...stackEvents, ...session.userEvents] @@ -245,8 +233,6 @@ export default class Session { siteId: projectId, events, stackEvents: stackEventsList, - resources: resourcesList, - missedResources, userDevice, userDeviceType, isMobile, diff --git a/frontend/app/types/synthetics/domBuildingTime.js b/frontend/app/types/synthetics/domBuildingTime.js deleted file mode 100644 index 258902f60..000000000 --- a/frontend/app/types/synthetics/domBuildingTime.js +++ /dev/null @@ -1,14 +0,0 @@ -import { Record } from 'immutable'; - -const DomBuildingTime = Record({ - avg: undefined, - chart: [], -}); - - -function fromJS(data = {}) { - if (data instanceof DomBuildingTime) return data; - return new DomBuildingTime(data); -} - -export default fromJS; \ No newline at end of file diff --git a/frontend/app/types/synthetics/index.js b/frontend/app/types/synthetics/index.js deleted file mode 100644 index 1cec8da81..000000000 --- a/frontend/app/types/synthetics/index.js +++ /dev/null @@ -1,27 +0,0 @@ -import { getChartFormatter } from './helper'; -import DomBuildingTime from './domBuildingTime'; - -export const WIDGET_LIST = [ - { - key: "resourcesLoadingTime", - name: "Resource Fetch Time", - description: 'List of resources that are slowing down your website, sorted by the number of impacted sessions.', - thumb: 'na.png', - type: 'resources', - dataWrapper: (list, period) => DomBuildingTime(list) - .update("chart", getChartFormatter(period)) - }, -]; - -export const WIDGET_KEYS = WIDGET_LIST.map(({ key }) => key); - -const WIDGET_MAP 
= {}; -WIDGET_LIST.forEach(w => { WIDGET_MAP[ w.key ] = w; }); - -const OVERVIEW_WIDGET_MAP = {}; -WIDGET_LIST.filter(w => w.type === 'overview').forEach(w => { OVERVIEW_WIDGET_MAP[ w.key ] = w; }); - -export { - WIDGET_MAP, - OVERVIEW_WIDGET_MAP -}; diff --git a/frontend/app/utils/index.ts b/frontend/app/utils/index.ts index b1ae63f43..47b40aa70 100644 --- a/frontend/app/utils/index.ts +++ b/frontend/app/utils/index.ts @@ -17,13 +17,6 @@ export function debounce(callback, wait, context = this) { }; } -export function getResourceName(url: string) { - return url - .split('/') - .filter((s) => s !== '') - .pop(); -} - /* eslint-disable no-mixed-operators */ export function randomInt(a, b) { const min = (b ? a : 0) - 0.5; From 6d4b797ab5a8bbd866247ffe7f609a170f468cf0 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 24 Feb 2023 18:04:08 +0100 Subject: [PATCH 128/218] fix(player):ignore query in file extension parsing --- frontend/app/player/web/types/resource.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/player/web/types/resource.ts b/frontend/app/player/web/types/resource.ts index 032790e48..c1b48ce9d 100644 --- a/frontend/app/player/web/types/resource.ts +++ b/frontend/app/player/web/types/resource.ts @@ -11,7 +11,7 @@ export const enum ResourceType { } function getURLExtention(url: string): string { - const pts = url.split(".") + const pts = url.split("?")[0].split(".") return pts[pts.length-1] || "" } From e4392a1bf8b3f518eea4fa1bccd20b6beffa8451 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 18:07:17 +0100 Subject: [PATCH 129/218] feat(assist): changed image --- utilities/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utilities/Dockerfile b/utilities/Dockerfile index 84b54c906..edbaae03c 100644 --- a/utilities/Dockerfile +++ b/utilities/Dockerfile @@ -18,4 +18,4 @@ USER 1001 ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE ENTRYPOINT ["/sbin/tini", "--"] -CMD npm start \ No newline at end of file +CMD npm start From b45a66673327156636f8ea40880a5dda5ef9c05c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 18:15:53 +0100 Subject: [PATCH 130/218] chore(actions): change actions --- .github/workflows/assist-ee.yaml | 120 +++++++++++++++++++++++++++++++ .github/workflows/assist.yaml | 119 ++++++++++++++++++++++++++++++ .github/workflows/utilities.yaml | 66 ----------------- 3 files changed, 239 insertions(+), 66 deletions(-) create mode 100644 .github/workflows/assist-ee.yaml create mode 100644 .github/workflows/assist.yaml delete mode 100644 .github/workflows/utilities.yaml diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml new file mode 100644 index 000000000..e5b5c62e1 --- /dev/null +++ b/.github/workflows/assist-ee.yaml @@ -0,0 +1,120 @@ +# This action will push the assist changes to aws +on: + workflow_dispatch: + push: + branches: + - dev + paths: + - "ee/utilities/**" + - "utilities/**" + - "!utilities/.gitignore" + - "!utilities/*-dev.sh" + +name: Build and Deploy Assist EE + +jobs: + deploy: + name: Deploy + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + # We need to diff with old commit + # to see which workers got changed. 
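          # (fetch-depth: 2 below keeps the parent commit in the clone;
          #  actions/checkout defaults to a depth-1 shallow clone, which would
          #  leave no previous commit to diff HEAD against.)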
+ fetch-depth: 2 + + - name: Docker login + run: | + docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}" + + - uses: azure/k8s-set-context@v1 + with: + method: kubeconfig + kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret. + id: setcontext + + - name: Building and Pushing Assist image + id: build-image + env: + DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} + IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee + ENVIRONMENT: staging + run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} + cd utilities + PUSH_IMAGE=0 bash -x ./build.sh ee + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("assist") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("assist") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <> /tmp/image_override.yaml + ${image_array[0]}: + image: + # We've to strip off the -ee, as helm will append it. + tag: `echo ${image_array[1]} | cut -d '-' -f 1` + EOF + done + - name: Deploy to kubernetes + run: | + cd scripts/helmcharts/ + + ## Update secerts + sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml + sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/assist/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - + env: + DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} + # We're not passing -ee flag, because helm will add that. 
+ IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} + ENVIRONMENT: staging + + # - name: Debug Job + # if: ${{ failure() }} + # uses: mxschmitt/action-tmate@v3 + # env: + # DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} + # IMAGE_TAG: ${{ github.sha }} + # ENVIRONMENT: staging + # \ No newline at end of file diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml new file mode 100644 index 000000000..ef93fc157 --- /dev/null +++ b/.github/workflows/assist.yaml @@ -0,0 +1,119 @@ +# This action will push the assist changes to aws +on: + workflow_dispatch: + push: + branches: + - dev + paths: + - "utilities/**" + - "!utilities/.gitignore" + - "!utilities/*-dev.sh" + +name: Build and Deploy Assist + +jobs: + deploy: + name: Deploy + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + # We need to diff with old commit + # to see which workers got changed. + fetch-depth: 2 + + - name: Docker login + run: | + docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}" + + - uses: azure/k8s-set-context@v1 + with: + method: kubeconfig + kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. + id: setcontext + + - name: Building and Pushing Assist image + id: build-image + env: + DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} + IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} + ENVIRONMENT: staging + run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} + cd utilities + PUSH_IMAGE=0 bash -x ./build.sh + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("assist") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("assist") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <> /tmp/image_override.yaml + ${image_array[0]}: + image: + # We've to strip off the -ee, as helm will append it. 
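          # (How that strip behaves: `cut -d '-' -f 1` keeps only the text
          #  before the FIRST hyphen, which removes the -ee suffix from tags
          #  like dev_<sha>-ee but would also truncate a tag whose branch
          #  segment itself contains a hyphen:
          #    echo "dev_0a1b2c-ee"       | cut -d '-' -f 1   # -> dev_0a1b2c
          #    echo "fix-login_0a1b2c-ee" | cut -d '-' -f 1   # -> fix (truncated)
          #  an anchored `sed 's/-ee$//'` would strip only the suffix.)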
+ tag: `echo ${image_array[1]} | cut -d '-' -f 1` + EOF + done + - name: Deploy to kubernetes + run: | + cd scripts/helmcharts/ + + ## Update secerts + sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml + sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.OSS_LICENSE_KEY }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/assist/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - + env: + DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} + # We're not passing -ee flag, because helm will add that. + IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} + ENVIRONMENT: staging + + # - name: Debug Job + # if: ${{ failure() }} + # uses: mxschmitt/action-tmate@v3 + # env: + # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} + # IMAGE_TAG: ${{ github.sha }} + # ENVIRONMENT: staging + # \ No newline at end of file diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml deleted file mode 100644 index 92e130c84..000000000 --- a/.github/workflows/utilities.yaml +++ /dev/null @@ -1,66 +0,0 @@ -# This action will push the utilities changes to aws -on: - workflow_dispatch: - push: - branches: - - dev - paths: - - utilities/** - -name: Build and Deploy Utilities - -jobs: - deploy: - name: Deploy - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v2 - with: - # We need to diff with old commit - # to see which workers got changed. - fetch-depth: 2 - - - name: Docker login - run: | - docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}" - - - uses: azure/k8s-set-context@v1 - with: - method: kubeconfig - kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. 
- id: setcontext - - - name: Building and Pusing api image - id: build-image - env: - DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} - IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} - ENVIRONMENT: staging - run: | - cd utilities - PUSH_IMAGE=1 bash build.sh - - name: Deploy to kubernetes - run: | - cd scripts/helm/ - sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml - sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml - sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml - sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml - sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml - sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml - bash kube-install.sh --app utilities - env: - DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} - IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} - ENVIRONMENT: staging - - # - name: Debug Job - # if: ${{ failure() }} - # uses: mxschmitt/action-tmate@v3 - # env: - # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} - # IMAGE_TAG: ${{ github.sha }} - # ENVIRONMENT: staging - # From 081cb778cf131faa665f414d1d3b2e491423c69c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Feb 2023 18:17:45 +0100 Subject: [PATCH 131/218] feat(assist): upgrade changes --- ee/utilities/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/utilities/Dockerfile b/ee/utilities/Dockerfile index 08ccba56f..3119b5eed 100644 --- a/ee/utilities/Dockerfile +++ b/ee/utilities/Dockerfile @@ -18,4 +18,4 @@ USER 1001 ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE ENTRYPOINT ["/sbin/tini", "--"] -CMD npm start +CMD npm start \ No newline at end of file From f1bd20c99468708b4c74248581855c12781777f8 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Fri, 24 Feb 2023 13:09:00 -0500 Subject: [PATCH 132/218] Delete ee/scripts/helm/db/init_dbs/postgresql/1.8.3 directory --- .../helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql deleted file mode 100644 index 950f4e179..000000000 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql +++ /dev/null @@ -1,13 +0,0 @@ -BEGIN; -CREATE OR REPLACE FUNCTION openreplay_version() - RETURNS text AS -$$ -SELECT 'v1.8.3-ee' -$$ LANGUAGE sql IMMUTABLE; - -ALTER TABLE IF EXISTS public.webhooks - ALTER COLUMN type SET DEFAULT 'webhook'; - -ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams'; - -COMMIT; \ No newline at end of file From d9589ca7808fed2054c49737a23861a72d7f0b3a Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Fri, 24 Feb 2023 13:09:59 -0500 Subject: [PATCH 133/218] Delete scripts/helm/db/init_dbs/postgresql/1.8.3 directory --- scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql b/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql deleted file mode 100644 index 30be2368d..000000000 --- a/scripts/helm/db/init_dbs/postgresql/1.8.3/1.8.3.sql +++ /dev/null @@ -1,13 +0,0 @@ -BEGIN; -CREATE OR REPLACE FUNCTION openreplay_version() - 
RETURNS text AS -$$ -SELECT 'v1.8.3' -$$ LANGUAGE sql IMMUTABLE; - -ALTER TABLE IF EXISTS public.webhooks - ALTER COLUMN type SET DEFAULT 'webhook'; - -ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams'; - -COMMIT; \ No newline at end of file From b94bd834d30aeec2dc03573f0fbb25489d4b9e17 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 11:36:30 +0100 Subject: [PATCH 134/218] fix(ui) - dashboard info update --- frontend/app/mstore/dashboardStore.ts | 4 ++-- frontend/app/mstore/types/dashboard.ts | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/frontend/app/mstore/dashboardStore.ts b/frontend/app/mstore/dashboardStore.ts index 5d96173ff..ad9aaa8f7 100644 --- a/frontend/app/mstore/dashboardStore.ts +++ b/frontend/app/mstore/dashboardStore.ts @@ -215,9 +215,9 @@ export default class DashboardStore { syncDashboardInfo(id: string, info: { name: string, description: string, isPublic: boolean, createdAt: number }) { if (this.selectedDashboard !== null) { - this.selectedDashboard.update(info) + this.selectedDashboard.updateInfo(info) const index = this.dashboards.findIndex((d) => d.dashboardId === id); - Object.assign(this.dashboards[index], info) + this.dashboards[index].updateInfo(info); } } diff --git a/frontend/app/mstore/types/dashboard.ts b/frontend/app/mstore/types/dashboard.ts index 098f28ee9..af499c870 100644 --- a/frontend/app/mstore/types/dashboard.ts +++ b/frontend/app/mstore/types/dashboard.ts @@ -30,6 +30,14 @@ export default class Dashboard { this.validate() } + updateInfo(data: any) { + runInAction(() => { + this.name = data.name || this.name + this.description = data.description || this.description + this.isPublic = data.isPublic + }) + } + toJson() { return { dashboardId: this.dashboardId, From 99dda00b5d07d65eaa797ad5f21826b3cf05c999 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 27 Feb 2023 11:39:16 +0100 Subject: [PATCH 135/218] feat(chalice): cleaned code --- api/chalicelib/core/projects.py | 9 ---- api/routers/core_dynamic.py | 69 ++++++++++++++---------------- ee/api/chalicelib/core/projects.py | 11 ----- ee/api/routers/core_dynamic.py | 66 +++++++++++++--------------- 4 files changed, 62 insertions(+), 93 deletions(-) diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index edecd5eba..24d1e01f7 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -212,15 +212,6 @@ def delete(tenant_id, user_id, project_id): return {"data": {"state": "success"}} -def count_by_tenant(tenant_id): - with pg_client.PostgresClient() as cur: - query = """SELECT count(1) AS count - FROM public.projects AS s - WHERE s.deleted_at IS NULL;""" - cur.execute(query=query) - return cur.fetchone()["count"] - - def get_gdpr(project_id): with pg_client.PostgresClient() as cur: query = cur.mogrify("""SELECT gdpr diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index cbbf229c1..3389074bf 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -47,22 +47,15 @@ async def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) async def edit_account(data: schemas.EditUserSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) -@app.get('/projects/limit', tags=['projects']) -async def 
get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context.tenant_id), - "remaining": -1 - }} - - @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -async def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): +async def add_slack_integration(data: schemas.AddCollaborationSchema, + context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -73,7 +66,7 @@ async def add_slack_integration(data: schemas.AddCollaborationSchema, context: s @app.post('/integrations/slack/{integrationId}', tags=['integrations']) async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) if not old: @@ -90,7 +83,7 @@ async def edit_slack_integration(integrationId: int, data: schemas.EditCollabora @app.post('/client/members', tags=["client"]) async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), background_tasks=background_tasks) @@ -127,14 +120,14 @@ async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSc @app.put('/client/members/{memberId}', tags=["client"]) async def edit_member(memberId: int, data: schemas.EditMemberSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if key is None or value is None or len(value) == 0 and len(key) == 0: return {"errors": ["please provide a key&value for search"]} if len(value) == 0: @@ -154,7 +147,7 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, @@ -171,7 +164,7 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: 
return data @@ -182,19 +175,19 @@ async def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.post('/{projectId}/errors/search', tags=['errors']) async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors']) async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) @app.get('/{projectId}/errors/{errorId}', tags=['errors']) async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -205,8 +198,8 @@ async def errors_get_details(projectId: int, errorId: str, background_tasks: Bac @app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"startDate": startDate, "endDate": endDate, "density": density}) return data @@ -214,7 +207,7 @@ async def errors_get_details_right_column(projectId: int, errorId: str, startDat @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) async def errors_get_details_sourcemaps(projectId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -225,7 +218,8 @@ async def errors_get_details_sourcemaps(projectId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), + context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) elif action == "sessions": @@ -242,7 +236,7 @@ async def add_remove_favorite_error(projectId: int, errorId: str, action: str, s @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = 
sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, @@ -257,7 +251,7 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"]) async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -277,7 +271,7 @@ async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"]) async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -297,13 +291,13 @@ async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, s @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) async def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @@ -322,7 +316,7 @@ async def assign_session(projectId: int, sessionId, context: schemas.CurrentCont @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) async def assign_session(projectId: int, sessionId: int, issueId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, tenant_id=context.tenant_id, user_id=context.user_id) if "errors" in data: @@ -333,8 +327,9 @@ async def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): +async def comment_assignment(projectId: int, sessionId: int, issueId: str, + data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, assignment_id=issueId, user_id=context.user_id, message=data.message) @@ -347,7 +342,7 @@ async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), - context: schemas.CurrentContext = 
Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId): return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, @@ -372,7 +367,7 @@ async def get_session_notes(projectId: int, sessionId: int, context: schemas.Cur @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, data=data) if "errors" in data.keys(): @@ -391,21 +386,21 @@ async def delete_note(projectId: int, noteId: int, context: schemas.CurrentConte @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_slack(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.post('/{projectId}/notes', tags=["sessions", "notes"]) async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data=data) if "errors" in data: @@ -415,5 +410,5 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(.. 
@app.post('/{projectId}/click_maps/search', tags=["click maps"]) async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index 2e22422ad..dc06703ce 100644 --- a/ee/api/chalicelib/core/projects.py +++ b/ee/api/chalicelib/core/projects.py @@ -207,17 +207,6 @@ def delete(tenant_id, user_id, project_id): return {"data": {"state": "success"}} -def count_by_tenant(tenant_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""SELECT count(1) AS count - FROM public.projects AS s - WHERE s.deleted_at IS NULL - AND tenant_id= %(tenant_id)s;""", - {"tenant_id": tenant_id}) - cur.execute(query=query) - return cur.fetchone()["count"] - - def get_gdpr(project_id): with pg_client.PostgresClient() as cur: query = cur.mogrify("""SELECT gdpr diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index bbf9c767e..8c8aa55b6 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -52,19 +52,11 @@ async def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) async def edit_account(data: schemas_ee.EditUserSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) -@app.get('/projects/limit', tags=['projects']) -async def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context.tenant_id), - "remaining": -1 - }} - - @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): @@ -78,7 +70,7 @@ async def add_slack_client(data: schemas.AddCollaborationSchema, context: schema @app.post('/integrations/slack/{integrationId}', tags=['integrations']) async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) if not old: @@ -95,7 +87,7 @@ async def edit_slack_integration(integrationId: int, data: schemas.EditCollabora @app.post('/client/members', tags=["client"]) async def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), background_tasks=background_tasks) @@ -134,14 +126,14 @@ async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSc @app.put('/client/members/{memberId}', tags=["client"]) async def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = 
Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if key is None or value is None or len(value) == 0 and len(key) == 0: return {"errors": ["please provide a key&value for search"]} @@ -165,7 +157,7 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, @@ -183,7 +175,7 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], dependencies=[OR_scope(Permissions.dev_tools)]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -194,19 +186,19 @@ async def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) @app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -217,8 +209,8 @@ async def errors_get_details(projectId: int, errorId: str, background_tasks: Bac @app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): + 
endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"startDate": startDate, "endDate": endDate, "density": density}) return data @@ -226,7 +218,7 @@ async def errors_get_details_right_column(projectId: int, errorId: str, startDat @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details_sourcemaps(projectId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -237,7 +229,8 @@ async def errors_get_details_sourcemaps(projectId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.dev_tools)]) async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), + context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) elif action == "sessions": @@ -254,7 +247,7 @@ async def add_remove_favorite_error(projectId: int, errorId: str, action: str, s @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)]) async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, @@ -270,7 +263,7 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -291,7 +284,7 @@ async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -311,14 +304,14 @@ async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, s @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = 
Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) async def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @@ -339,7 +332,7 @@ async def assign_session(projectId: int, sessionId, context: schemas.CurrentCont @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) async def assign_session(projectId: int, sessionId: int, issueId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, tenant_id=context.tenant_id, user_id=context.user_id) if "errors" in data: @@ -351,8 +344,9 @@ async def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) -async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): +async def comment_assignment(projectId: int, sessionId: int, issueId: str, + data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, assignment_id=issueId, user_id=context.user_id, message=data.message) @@ -366,7 +360,7 @@ async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId): return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, @@ -393,7 +387,7 @@ async def get_session_notes(projectId: int, sessionId: int, context: schemas.Cur @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, data=data) if "errors" in data.keys(): @@ -414,21 +408,21 @@ async def delete_note(projectId: int, noteId: int, context: schemas.CurrentConte @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def 
share_note_to_slack(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data=data) if "errors" in data: @@ -438,5 +432,5 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(.. @app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)]) async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} From 8a843a1af94d0d892d1af698c0e58037db6b501e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 27 Feb 2023 11:39:16 +0100 Subject: [PATCH 136/218] feat(chalice): cleaned code --- api/chalicelib/core/projects.py | 9 ---- api/routers/core_dynamic.py | 69 ++++++++++++++---------------- ee/api/chalicelib/core/projects.py | 11 ----- ee/api/routers/core_dynamic.py | 66 +++++++++++++--------------- 4 files changed, 62 insertions(+), 93 deletions(-) diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index edecd5eba..24d1e01f7 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -212,15 +212,6 @@ def delete(tenant_id, user_id, project_id): return {"data": {"state": "success"}} -def count_by_tenant(tenant_id): - with pg_client.PostgresClient() as cur: - query = """SELECT count(1) AS count - FROM public.projects AS s - WHERE s.deleted_at IS NULL;""" - cur.execute(query=query) - return cur.fetchone()["count"] - - def get_gdpr(project_id): with pg_client.PostgresClient() as cur: query = cur.mogrify("""SELECT gdpr diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index cbbf229c1..3389074bf 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -47,22 +47,15 @@ async def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) async def edit_account(data: schemas.EditUserSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) -@app.get('/projects/limit', tags=['projects']) -async def 
get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context.tenant_id), - "remaining": -1 - }} - - @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -async def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): +async def add_slack_integration(data: schemas.AddCollaborationSchema, + context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -73,7 +66,7 @@ async def add_slack_integration(data: schemas.AddCollaborationSchema, context: s @app.post('/integrations/slack/{integrationId}', tags=['integrations']) async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) if not old: @@ -90,7 +83,7 @@ async def edit_slack_integration(integrationId: int, data: schemas.EditCollabora @app.post('/client/members', tags=["client"]) async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), background_tasks=background_tasks) @@ -127,14 +120,14 @@ async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSc @app.put('/client/members/{memberId}', tags=["client"]) async def edit_member(memberId: int, data: schemas.EditMemberSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if key is None or value is None or len(value) == 0 and len(key) == 0: return {"errors": ["please provide a key&value for search"]} if len(value) == 0: @@ -154,7 +147,7 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, @@ -171,7 +164,7 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: 
return data @@ -182,19 +175,19 @@ async def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.post('/{projectId}/errors/search', tags=['errors']) async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors']) async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) @app.get('/{projectId}/errors/{errorId}', tags=['errors']) async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -205,8 +198,8 @@ async def errors_get_details(projectId: int, errorId: str, background_tasks: Bac @app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"startDate": startDate, "endDate": endDate, "density": density}) return data @@ -214,7 +207,7 @@ async def errors_get_details_right_column(projectId: int, errorId: str, startDat @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) async def errors_get_details_sourcemaps(projectId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -225,7 +218,8 @@ async def errors_get_details_sourcemaps(projectId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), + context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) elif action == "sessions": @@ -242,7 +236,7 @@ async def add_remove_favorite_error(projectId: int, errorId: str, action: str, s @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = 
sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, @@ -257,7 +251,7 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"]) async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -277,7 +271,7 @@ async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"]) async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -297,13 +291,13 @@ async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, s @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"]) async def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @@ -322,7 +316,7 @@ async def assign_session(projectId: int, sessionId, context: schemas.CurrentCont @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) async def assign_session(projectId: int, sessionId: int, issueId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, tenant_id=context.tenant_id, user_id=context.user_id) if "errors" in data: @@ -333,8 +327,9 @@ async def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) -async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): +async def comment_assignment(projectId: int, sessionId: int, issueId: str, + data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, assignment_id=issueId, user_id=context.user_id, message=data.message) @@ -347,7 +342,7 @@ async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), - context: schemas.CurrentContext = 
Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId): return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, @@ -372,7 +367,7 @@ async def get_session_notes(projectId: int, sessionId: int, context: schemas.Cur @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, data=data) if "errors" in data.keys(): @@ -391,21 +386,21 @@ async def delete_note(projectId: int, noteId: int, context: schemas.CurrentConte @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_slack(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.post('/{projectId}/notes', tags=["sessions", "notes"]) async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data=data) if "errors" in data: @@ -415,5 +410,5 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(.. 
@app.post('/{projectId}/click_maps/search', tags=["click maps"]) async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index 2e22422ad..dc06703ce 100644 --- a/ee/api/chalicelib/core/projects.py +++ b/ee/api/chalicelib/core/projects.py @@ -207,17 +207,6 @@ def delete(tenant_id, user_id, project_id): return {"data": {"state": "success"}} -def count_by_tenant(tenant_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""SELECT count(1) AS count - FROM public.projects AS s - WHERE s.deleted_at IS NULL - AND tenant_id= %(tenant_id)s;""", - {"tenant_id": tenant_id}) - cur.execute(query=query) - return cur.fetchone()["count"] - - def get_gdpr(project_id): with pg_client.PostgresClient() as cur: query = cur.mogrify("""SELECT gdpr diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index bbf9c767e..8c8aa55b6 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -52,19 +52,11 @@ async def get_account(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/account', tags=["account"]) async def edit_account(data: schemas_ee.EditUserSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data, editor_id=context.user_id) -@app.get('/projects/limit', tags=['projects']) -async def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context.tenant_id), - "remaining": -1 - }} - - @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): @@ -78,7 +70,7 @@ async def add_slack_client(data: schemas.AddCollaborationSchema, context: schema @app.post('/integrations/slack/{integrationId}', tags=['integrations']) async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId) if not old: @@ -95,7 +87,7 @@ async def edit_slack_integration(integrationId: int, data: schemas.EditCollabora @app.post('/client/members', tags=["client"]) async def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), background_tasks=background_tasks) @@ -134,14 +126,14 @@ async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSc @app.put('/client/members/{memberId}', tags=["client"]) async def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = 
Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if key is None or value is None or len(value) == 0 and len(key) == 0: return {"errors": ["please provide a key&value for search"]} @@ -165,7 +157,7 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, @@ -183,7 +175,7 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], dependencies=[OR_scope(Permissions.dev_tools)]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -194,19 +186,19 @@ async def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": errors.search(data, projectId, user_id=context.user_id)} @app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) @app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, - density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: @@ -217,8 +209,8 @@ async def errors_get_details(projectId: int, errorId: str, background_tasks: Bac @app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): + 
endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"startDate": startDate, "endDate": endDate, "density": density}) return data @@ -226,7 +218,7 @@ async def errors_get_details_right_column(projectId: int, errorId: str, startDat @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) async def errors_get_details_sourcemaps(projectId: int, errorId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId) if "errors" in data: return data @@ -237,7 +229,8 @@ async def errors_get_details_sourcemaps(projectId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.dev_tools)]) async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + endDate: int = TimeUTC.now(), + context: schemas.CurrentContext = Depends(OR_context)): if action == "favorite": return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) elif action == "sessions": @@ -254,7 +247,7 @@ async def add_remove_favorite_error(projectId: int, errorId: str, action: str, s @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)]) async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, @@ -270,7 +263,7 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -291,7 +284,7 @@ async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} if isinstance(sessionId, str): print(f"{sessionId} not a valid number.") @@ -311,14 +304,14 @@ async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, s @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = 
Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) async def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): return { "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)} @@ -339,7 +332,7 @@ async def assign_session(projectId: int, sessionId, context: schemas.CurrentCont @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) async def assign_session(projectId: int, sessionId: int, issueId: str, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, tenant_id=context.tenant_id, user_id=context.user_id) if "errors" in data: @@ -351,8 +344,9 @@ async def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], dependencies=[OR_scope(Permissions.session_replay)]) -async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): +async def comment_assignment(projectId: int, sessionId: int, issueId: str, + data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, assignment_id=issueId, user_id=context.user_id, message=data.message) @@ -366,7 +360,7 @@ async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId): return {"errors": ["Session not found"]} data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, @@ -393,7 +387,7 @@ async def get_session_notes(projectId: int, sessionId: int, context: schemas.Cur @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, data=data) if "errors" in data.keys(): @@ -414,21 +408,21 @@ async def delete_note(projectId: int, noteId: int, context: schemas.CurrentConte @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def 
share_note_to_slack(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"]) async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, note_id=noteId, webhook_id=webhookId) @app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, data=data) if "errors" in data: @@ -438,5 +432,5 @@ async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(.. @app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)]) async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} From fb0e59184017ef0cac7cb3019e82086c352e063b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 11:48:14 +0100 Subject: [PATCH 137/218] change(ui) - settings menu alignment --- frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx index e4d7851c4..2faf5e128 100644 --- a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx +++ b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx @@ -36,7 +36,7 @@ function SettingsMenu(props: RouteComponentProps) { return (
{isAdmin && ( <> From 2295d4f69a4658e3b131fae925fef60aef3afc0c Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 27 Feb 2023 14:22:33 +0100 Subject: [PATCH 138/218] fix(script): check migration versions Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 21ace9d2c..6402a3c88 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -33,7 +33,13 @@ function migration() { # Checking migration versions cd /opt/openreplay/openreplay/scripts/schema - migration_versions=(`ls -l db/init_dbs/$db | grep -E ^d | awk -v number=${PREVIOUS_APP_VERSION} '$NF > number {print $NF}' | grep -v create`) + + # We need to remove version dots + function normalise_version { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }' + } + all_versions=(`ls -l db/init_dbs/$db | grep -E ^d | grep -v create | awk '{print $NF}'`) + migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. So joining them with , joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") From 478a8ea417982de5390c40f2111531ad60edef56 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 15:49:26 +0100 Subject: [PATCH 139/218] fix(ui) - url search metadata --- frontend/app/types/filter/newFilter.js | 7 +++++++ frontend/app/utils/search.ts | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index 286f1cc13..0fcc8e668 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -196,6 +196,13 @@ export default Record({ } } + if (!_filter) { + _filter = { + key: filter.key, + type: "MULTIPLE", + } + } + return { ..._filter, ...filter, diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 4b32f8d13..5779df732 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -83,7 +83,7 @@ const getFiltersFromEntries = (entires: any) => { filter.operator = operator; if (filter.icon === "filters/metadata") { filter.source = filter.type; - filter.type = 'metadata'; + filter.type = 'METADATA'; } else { filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; filter.sourceOperator = !!sourceOperator ? 
decodeURI(sourceOperator) : null; From 8f9bafdb85638385d8cb2c859d659293346117c2 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 16:13:27 +0100 Subject: [PATCH 140/218] change(ui) - gridview - menu --- .../Dashboard/components/MetricsList/GridView.tsx | 1 + .../Dashboard/components/WidgetWrapper/WidgetWrapper.tsx | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx b/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx index 519e191f0..ab3f770f0 100644 --- a/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx +++ b/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx @@ -22,6 +22,7 @@ function GridView(props: Props) { onItemClick(parseInt(metric.metricId))} diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx index 4dfdeefc8..cfbf993dd 100644 --- a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx @@ -28,6 +28,7 @@ interface Props { isWidget?: boolean; hideName?: boolean; grid?: string; + isGridView?: boolean; } function WidgetWrapper(props: Props & RouteComponentProps) { const { dashboardStore } = useStore(); @@ -40,6 +41,7 @@ function WidgetWrapper(props: Props & RouteComponentProps) { isTemplate = false, siteId, grid = '', + isGridView = false, } = props; const widget: any = props.widget; const isTimeSeries = widget.metricType === 'timeseries'; @@ -133,14 +135,14 @@ function WidgetWrapper(props: Props & RouteComponentProps) { ) : null} {isWidget && (
- {!isPredefined && isTimeSeries && ( + {!isPredefined && isTimeSeries && !isGridView && ( <>
)} - {!isTemplate && ( + {!isTemplate && !isGridView && ( Date: Mon, 27 Feb 2023 16:24:17 +0100 Subject: [PATCH 141/218] fix(init): ch password issue Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/clickhouse.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/files/clickhouse.sh b/scripts/helmcharts/openreplay/files/clickhouse.sh index 891cc79c4..9f6a1cc73 100644 --- a/scripts/helmcharts/openreplay/files/clickhouse.sh +++ b/scripts/helmcharts/openreplay/files/clickhouse.sh @@ -4,6 +4,10 @@ set -ex clickhousedir=/opt/openreplay/openreplay/scripts/schema/db/init_dbs/clickhouse +[[ "${CH_PASSWORD}" == "" ]] || { + CH_PASSWORD="--password $CH_PASSWORD" +} + function migrate() { echo "Starting clickhouse migration" IFS=',' read -r -a migration_versions <<< "$1" @@ -11,7 +15,7 @@ function migrate() { echo "Migrating clickhouse version $version" # For now, we can ignore the clickhouse db inject errors. # TODO: Better error handling in script - clickhouse-client -h ${CH_HOST} --port ${CH_PORT} --user ${CH_USERNAME} --password ${CH_PASSWORD} --multiquery < ${clickhousedir}/${version}/${version}.sql || true + clickhouse-client -h ${CH_HOST} --port ${CH_PORT} --user ${CH_USERNAME} ${CH_PASSWORD} --multiquery < ${clickhousedir}/${version}/${version}.sql || true done } @@ -19,7 +23,7 @@ function init() { echo "Initializing clickhouse" for file in `ls ${clickhousedir}/create/*.sql`; do echo "Injecting $file" - clickhouse-client -h ${CH_HOST} --user ${CH_USERNAME} --password ${CH_PASSWORD} --port ${CH_PORT} --multiquery < $file || true + clickhouse-client -h ${CH_HOST} --user ${CH_USERNAME} ${CH_PASSWORD} --port ${CH_PORT} --multiquery < $file || true done } From bbf2bb19231d3e82a912ab49d7c1538383cf3c28 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 27 Feb 2023 16:53:01 +0100 Subject: [PATCH 142/218] feat(DB): support multiple upgrade executions --- ee/scripts/schema/db/init_dbs/clickhouse/1.10.0/1.10.0.sql | 2 +- ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 3 ++- scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.10.0/1.10.0.sql index 62a40e345..cc4816020 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.10.0/1.10.0.sql @@ -1,2 +1,2 @@ ALTER TABLE experimental.sessions - MODIFY COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 
'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126, 'BU'=127); + MODIFY COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 
'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126); diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 6461a1214..044b24176 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -258,7 +258,8 @@ $$ IF EXISTS(SELECT column_name FROM information_schema.columns WHERE table_name = 'metrics' - and column_name = 'is_predefined') THEN + AND column_name = 'is_predefined' + AND table_schema = 'public') THEN -- 0. change metric_of UPDATE metrics SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''), diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 92c0964bb..f2fb3f839 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -232,7 +232,8 @@ $$ IF EXISTS(SELECT column_name FROM information_schema.columns WHERE table_name = 'metrics' - and column_name = 'is_predefined') THEN + AND column_name = 'is_predefined' + AND table_schema = 'public') THEN -- 0. change metric_of UPDATE metrics SET metric_of=coalesce(replace(get_global_key(metric_of), '"', ''), From 389018da05e40e2dac833249fc5359a5581ad153 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 11:48:14 +0100 Subject: [PATCH 143/218] change(ui) - settings menu alignment --- frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx index e4d7851c4..2faf5e128 100644 --- a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx +++ b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx @@ -36,7 +36,7 @@ function SettingsMenu(props: RouteComponentProps) { return (
{isAdmin && ( <> From 5b5017b4c3e5893b0ea109cedd2be61142fea8ac Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 27 Feb 2023 14:22:33 +0100 Subject: [PATCH 144/218] fix(script): check migration versions Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 21ace9d2c..6402a3c88 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -33,7 +33,13 @@ function migration() { # Checking migration versions cd /opt/openreplay/openreplay/scripts/schema - migration_versions=(`ls -l db/init_dbs/$db | grep -E ^d | awk -v number=${PREVIOUS_APP_VERSION} '$NF > number {print $NF}' | grep -v create`) + + # We need to remove version dots + function normalise_version { + echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }' + } + all_versions=(`ls -l db/init_dbs/$db | grep -E ^d | grep -v create | awk '{print $NF}'`) + migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. So joining them with , joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") From c36a33d08f3460264929957136989085cd66a19b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 15:49:26 +0100 Subject: [PATCH 145/218] fix(ui) - url search metadata --- frontend/app/types/filter/newFilter.js | 7 +++++++ frontend/app/utils/search.ts | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index 286f1cc13..0fcc8e668 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -196,6 +196,13 @@ export default Record({ } } + if (!_filter) { + _filter = { + key: filter.key, + type: "MULTIPLE", + } + } + return { ..._filter, ...filter, diff --git a/frontend/app/utils/search.ts b/frontend/app/utils/search.ts index 4b32f8d13..5779df732 100644 --- a/frontend/app/utils/search.ts +++ b/frontend/app/utils/search.ts @@ -83,7 +83,7 @@ const getFiltersFromEntries = (entires: any) => { filter.operator = operator; if (filter.icon === "filters/metadata") { filter.source = filter.type; - filter.type = 'metadata'; + filter.type = 'METADATA'; } else { filter.source = sourceArr && sourceArr.length > 0 ? sourceArr : null; filter.sourceOperator = !!sourceOperator ? 
decodeURI(sourceOperator) : null; From 8765cc7d8927d6b11aa21a1cc8a59606f913997d Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 16:13:27 +0100 Subject: [PATCH 146/218] change(ui) - gridview - menu --- .../Dashboard/components/MetricsList/GridView.tsx | 1 + .../Dashboard/components/WidgetWrapper/WidgetWrapper.tsx | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx b/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx index 519e191f0..ab3f770f0 100644 --- a/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx +++ b/frontend/app/components/Dashboard/components/MetricsList/GridView.tsx @@ -22,6 +22,7 @@ function GridView(props: Props) { onItemClick(parseInt(metric.metricId))} diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx index 4dfdeefc8..cfbf993dd 100644 --- a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx @@ -28,6 +28,7 @@ interface Props { isWidget?: boolean; hideName?: boolean; grid?: string; + isGridView?: boolean; } function WidgetWrapper(props: Props & RouteComponentProps) { const { dashboardStore } = useStore(); @@ -40,6 +41,7 @@ function WidgetWrapper(props: Props & RouteComponentProps) { isTemplate = false, siteId, grid = '', + isGridView = false, } = props; const widget: any = props.widget; const isTimeSeries = widget.metricType === 'timeseries'; @@ -133,14 +135,14 @@ function WidgetWrapper(props: Props & RouteComponentProps) { ) : null} {isWidget && (
- {!isPredefined && isTimeSeries && ( + {!isPredefined && isTimeSeries && !isGridView && ( <>
)} - {!isTemplate && ( + {!isTemplate && !isGridView && ( Date: Mon, 27 Feb 2023 16:24:17 +0100 Subject: [PATCH 147/218] fix(init): ch password issue Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/clickhouse.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/files/clickhouse.sh b/scripts/helmcharts/openreplay/files/clickhouse.sh index 891cc79c4..9f6a1cc73 100644 --- a/scripts/helmcharts/openreplay/files/clickhouse.sh +++ b/scripts/helmcharts/openreplay/files/clickhouse.sh @@ -4,6 +4,10 @@ set -ex clickhousedir=/opt/openreplay/openreplay/scripts/schema/db/init_dbs/clickhouse +[[ "${CH_PASSWORD}" == "" ]] || { + CH_PASSWORD="--password $CH_PASSWORD" +} + function migrate() { echo "Starting clickhouse migration" IFS=',' read -r -a migration_versions <<< "$1" @@ -11,7 +15,7 @@ function migrate() { echo "Migrating clickhouse version $version" # For now, we can ignore the clickhouse db inject errors. # TODO: Better error handling in script - clickhouse-client -h ${CH_HOST} --port ${CH_PORT} --user ${CH_USERNAME} --password ${CH_PASSWORD} --multiquery < ${clickhousedir}/${version}/${version}.sql || true + clickhouse-client -h ${CH_HOST} --port ${CH_PORT} --user ${CH_USERNAME} ${CH_PASSWORD} --multiquery < ${clickhousedir}/${version}/${version}.sql || true done } @@ -19,7 +23,7 @@ function init() { echo "Initializing clickhouse" for file in `ls ${clickhousedir}/create/*.sql`; do echo "Injecting $file" - clickhouse-client -h ${CH_HOST} --user ${CH_USERNAME} --password ${CH_PASSWORD} --port ${CH_PORT} --multiquery < $file || true + clickhouse-client -h ${CH_HOST} --user ${CH_USERNAME} ${CH_PASSWORD} --port ${CH_PORT} --multiquery < $file || true done } From ad0f3ab6439dd0dfb5642cefc53425fb7792a83f Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 16:58:02 +0100 Subject: [PATCH 148/218] change(ui) - dashboard list text change --- .../Dashboard/components/DashboardList/DashboardList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx index 2b60ad40a..18c96f12a 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx @@ -37,7 +37,7 @@ function DashboardList() {
            <div ...>Title</div>
            <div ...>Visibility</div>
-           <div ...>Created</div>
+           <div ...>Creation Date</div>
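
(The dbops.sh change above replaces a single awk filter with an explicit normalise_version helper so that multi-digit components compare correctly; plain string ordering would treat "1.9.0" as newer than "1.10.0". A minimal Python sketch of the same zero-padding idea, assuming plain dotted numeric versions with no suffixes:

    def normalise_version(version: str) -> str:
        # Mirror awk's printf("%d%03d%03d%03d", $1, $2, $3, $4): pad each
        # dot-separated component to three digits so the resulting strings
        # compare correctly both numerically and lexicographically.
        parts = (version.split(".") + ["0", "0", "0"])[:4]
        return "%d%03d%03d%03d" % tuple(int(p) for p in parts)

    previous = "1.9.0"
    pending = [v for v in ["1.8.1", "1.9.0", "1.10.0"]
               if normalise_version(v) > normalise_version(previous)]
    # pending == ["1.10.0"]; comparing the raw strings instead would have
    # ranked "1.9.0" above "1.10.0" and skipped the migration.

This mirrors the shell script's `[[ a > b ]]` string comparison, which is safe only because the normalised strings have a fixed width.)
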
{sliceListPerPage(list, dashboardStore.page - 1, dashboardStore.pageSize).map( From b1945a87e39d308c94aeeaa8983fc43b859a1efa Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 17:12:58 +0100 Subject: [PATCH 149/218] change(ui) - player speed include 0.5 --- frontend/app/player/player/Player.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/player/player/Player.ts b/frontend/app/player/player/Player.ts index 837443f3f..9ebd04259 100644 --- a/frontend/app/player/player/Player.ts +++ b/frontend/app/player/player/Player.ts @@ -13,7 +13,7 @@ const SKIP_TO_ISSUE_STORAGE_KEY = "__$session-skipToIssue$__" const AUTOPLAY_STORAGE_KEY = "__$player-autoplay$__" const SHOW_EVENTS_STORAGE_KEY = "__$player-show-events$__" const storedSpeed: number = typedLocalStorage.number(SPEED_STORAGE_KEY) -const initialSpeed = [1, 2, 4, 8, 16].includes(storedSpeed) ? storedSpeed : 1 +const initialSpeed = [0.5, 1, 2, 4, 8, 16].includes(storedSpeed) ? storedSpeed : 0.5 const initialSkip = typedLocalStorage.boolean(SKIP_STORAGE_KEY) const initialSkipToIssue = typedLocalStorage.boolean(SKIP_TO_ISSUE_STORAGE_KEY) const initialAutoplay = typedLocalStorage.boolean(AUTOPLAY_STORAGE_KEY) @@ -91,7 +91,7 @@ export default class Player extends Animator { toggleSpeed() { const { speed } = this.pState.get() - this.updateSpeed(speed < HIGHEST_SPEED ? speed * 2 : 1) + this.updateSpeed(speed < HIGHEST_SPEED ? speed * 2 : 0.5) } speedUp() { From d61b727f6b2f36629dcbbc26d7c0740ba7f78344 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Mon, 27 Feb 2023 17:37:55 +0100 Subject: [PATCH 150/218] fix(frontend): cancellabelTimeoutHook: clean only once --- frontend/app/hooks/useCancelableTimeout.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/hooks/useCancelableTimeout.ts b/frontend/app/hooks/useCancelableTimeout.ts index 9f12f336a..1ab4d032a 100644 --- a/frontend/app/hooks/useCancelableTimeout.ts +++ b/frontend/app/hooks/useCancelableTimeout.ts @@ -15,6 +15,6 @@ export default function useCancelableTimeout( clearTimeout(idRef.current) onCancel() } - useEffect(() => () => clearTimeout(idRef.current)) // auto-cancel without callback (clean) + useEffect(() => () => clearTimeout(idRef.current), []) // auto-cancel without callback (on clean) return [ triggerTimeout, cancelTimeout ] } \ No newline at end of file From 48fdd47ea7de9e93e7594909a4257d5ff4fc2afe Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 27 Feb 2023 20:42:06 +0100 Subject: [PATCH 151/218] chore(helm): support http protocol for OR acces --- .../openreplay/charts/alerts/templates/deployment.yaml | 4 ++-- .../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../openreplay/charts/assist/templates/deployment.yaml | 2 +- .../openreplay/charts/chalice/templates/deployment.yaml | 4 ++-- .../openreplay/charts/http/templates/deployment.yaml | 2 +- .../openreplay/charts/sink/templates/deployment.yaml | 2 +- scripts/helmcharts/openreplay/values.yaml | 4 +++- 7 files changed, 11 insertions(+), 9 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index d4c1d6e49..58ed2388f 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -71,10 +71,10 @@ spec: value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} - name: SITE_URL - value: 'https://{{ 
.Values.global.domainName }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index f959adc13..cdfb0a302 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index 92ae9a93c..db42a3e0e 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -51,7 +51,7 @@ spec: value: "{{ .Values.global.s3.region }}" - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index a15553a8a..eaaab92c5 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ -81,10 +81,10 @@ spec: value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} - name: SITE_URL - value: 'https://{{ .Values.global.domainName }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} 
value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 1add28054..0f46b47e0 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 88bd89c1f..3b940658f 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index 694585180..3b4f9e985 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -37,5 +37,7 @@ global: vault: *vault redis: *redis clusterDomain: "svc.cluster.local" - # In case you've http proxy to access internet. 
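
(The clickhouse.sh fix above rewrites CH_PASSWORD to carry its own --password flag, so an empty password drops the flag entirely instead of passing --password with no value. A minimal Python sketch of the same optional-argument idea, assuming the same CH_* environment variables are set:

    import os

    def clickhouse_client_args() -> list[str]:
        args = [
            "clickhouse-client",
            "-h", os.environ["CH_HOST"],
            "--port", os.environ["CH_PORT"],
            "--user", os.environ["CH_USERNAME"],
        ]
        password = os.environ.get("CH_PASSWORD", "")
        if password:
            # Append the flag only when a password is actually configured;
            # an unset or empty CH_PASSWORD contributes nothing.
            args += ["--password", password]
        return args

)
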
env: {} + # If you're accessing OpenReplay with http, then update the value to http + ORSecureAccess: true + From 2490a323ace047f808a35691dfe6fea2e19c6bf0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Feb 2023 11:17:08 +0100 Subject: [PATCH 152/218] feat(chalice): fixed create&add card to dashboard at the same time --- api/chalicelib/core/dashboards.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 89f56176b..17826f762 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -121,7 +121,7 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo WHERE dashboards.project_id = %(projectId)s AND dashboard_id = %(dashboard_id)s AND (dashboards.user_id = %(userId)s OR is_public) - RETURNING dashboard_id,name,description,is_public,created_at;""" + RETURNING dashboard_id,name,description,is_public,created_at""" if data.metrics is not None and len(data.metrics) > 0: pg_query = f"""WITH dash AS ({pg_query}) INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) From 2a5e478e641fd3de2d886e2ec429714132cb816e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 16:58:02 +0100 Subject: [PATCH 153/218] change(ui) - dashboard list text change --- .../Dashboard/components/DashboardList/DashboardList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx index 2b60ad40a..18c96f12a 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx @@ -37,7 +37,7 @@ function DashboardList() {
            <div ...>Title</div>
            <div ...>Visibility</div>
-           <div ...>Created</div>
+           <div ...>Creation Date</div>
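
(The dashboards.py change above chains an INSERT into dashboard_widgets onto the dashboard UPDATE through a data-modifying CTE. PostgreSQL does not let an INSERT's RETURNING list reference the CTE's columns directly (`RETURNING dash.*` fails), which is why the follow-up fix below pulls each column out with a scalar subselect. A minimal sketch of the statement shape, with the column list trimmed down:

    query = """
    WITH dash AS (
        UPDATE dashboards
        SET name = %(name)s
        WHERE dashboard_id = %(dashboard_id)s
        RETURNING dashboard_id, name
    )
    INSERT INTO dashboard_widgets (dashboard_id, metric_id)
    VALUES (%(dashboard_id)s, %(metric_id)s)
    -- RETURNING dash.* is not valid here; each value needs a scalar subselect:
    RETURNING (SELECT dashboard_id FROM dash), (SELECT name FROM dash);
    """

)
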
{sliceListPerPage(list, dashboardStore.page - 1, dashboardStore.pageSize).map( From 63594548aefda60f76bde4ff2f6ee590591fa227 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Feb 2023 17:12:58 +0100 Subject: [PATCH 154/218] change(ui) - player speed include 0.5 --- frontend/app/player/player/Player.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/player/player/Player.ts b/frontend/app/player/player/Player.ts index 837443f3f..9ebd04259 100644 --- a/frontend/app/player/player/Player.ts +++ b/frontend/app/player/player/Player.ts @@ -13,7 +13,7 @@ const SKIP_TO_ISSUE_STORAGE_KEY = "__$session-skipToIssue$__" const AUTOPLAY_STORAGE_KEY = "__$player-autoplay$__" const SHOW_EVENTS_STORAGE_KEY = "__$player-show-events$__" const storedSpeed: number = typedLocalStorage.number(SPEED_STORAGE_KEY) -const initialSpeed = [1, 2, 4, 8, 16].includes(storedSpeed) ? storedSpeed : 1 +const initialSpeed = [0.5, 1, 2, 4, 8, 16].includes(storedSpeed) ? storedSpeed : 0.5 const initialSkip = typedLocalStorage.boolean(SKIP_STORAGE_KEY) const initialSkipToIssue = typedLocalStorage.boolean(SKIP_TO_ISSUE_STORAGE_KEY) const initialAutoplay = typedLocalStorage.boolean(AUTOPLAY_STORAGE_KEY) @@ -91,7 +91,7 @@ export default class Player extends Animator { toggleSpeed() { const { speed } = this.pState.get() - this.updateSpeed(speed < HIGHEST_SPEED ? speed * 2 : 1) + this.updateSpeed(speed < HIGHEST_SPEED ? speed * 2 : 0.5) } speedUp() { From e9e2bc1e1e60a4de3c76702136f917fc17ab1af7 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Mon, 27 Feb 2023 17:37:55 +0100 Subject: [PATCH 155/218] fix(frontend): cancellabelTimeoutHook: clean only once --- frontend/app/hooks/useCancelableTimeout.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/hooks/useCancelableTimeout.ts b/frontend/app/hooks/useCancelableTimeout.ts index 9f12f336a..1ab4d032a 100644 --- a/frontend/app/hooks/useCancelableTimeout.ts +++ b/frontend/app/hooks/useCancelableTimeout.ts @@ -15,6 +15,6 @@ export default function useCancelableTimeout( clearTimeout(idRef.current) onCancel() } - useEffect(() => () => clearTimeout(idRef.current)) // auto-cancel without callback (clean) + useEffect(() => () => clearTimeout(idRef.current), []) // auto-cancel without callback (on clean) return [ triggerTimeout, cancelTimeout ] } \ No newline at end of file From 5f86ae055d461dd88361aca2c887733d77693bee Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 27 Feb 2023 20:42:06 +0100 Subject: [PATCH 156/218] chore(helm): support http protocol for OR acces --- .../openreplay/charts/alerts/templates/deployment.yaml | 4 ++-- .../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../openreplay/charts/assist/templates/deployment.yaml | 2 +- .../openreplay/charts/chalice/templates/deployment.yaml | 4 ++-- .../openreplay/charts/http/templates/deployment.yaml | 2 +- .../openreplay/charts/sink/templates/deployment.yaml | 2 +- scripts/helmcharts/openreplay/values.yaml | 4 +++- 7 files changed, 11 insertions(+), 9 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index d4c1d6e49..58ed2388f 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -71,10 +71,10 @@ spec: value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} - name: SITE_URL - value: 'https://{{ 
.Values.global.domainName }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index f959adc13..cdfb0a302 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index 92ae9a93c..db42a3e0e 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -51,7 +51,7 @@ spec: value: "{{ .Values.global.s3.region }}" - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index a15553a8a..eaaab92c5 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ -81,10 +81,10 @@ spec: value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} - name: SITE_URL - value: 'https://{{ .Values.global.domainName }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} 
value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 1add28054..0f46b47e0 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 88bd89c1f..3b940658f 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: 'https://{{ .Values.global.domainName }}:{{.Values.global.ingress.controller.service.ports.https}}/{{.Values.global.s3.assetsBucket}}' + value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index 694585180..3b4f9e985 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -37,5 +37,7 @@ global: vault: *vault redis: *redis clusterDomain: "svc.cluster.local" - # In case you've http proxy to access internet. 
env: {} + # If you're accessing OpenReplay with http, then update the value to http + ORSecureAccess: true + From 9aec3ef2a0fb3708c637c3bc650bdef02b02fabc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Feb 2023 11:42:16 +0100 Subject: [PATCH 157/218] feat(chalice): fixed create&add card to dashboard at the same time --- api/chalicelib/core/dashboards.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 17826f762..d78a778ef 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -126,7 +126,9 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo pg_query = f"""WITH dash AS ({pg_query}) INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])} - RETURNING dash.*;""" + RETURNING (SELECT dashboard_id FROM dash),(SELECT name FROM dash), + (SELECT description FROM dash),(SELECT is_public FROM dash), + (SELECT created_at FROM dash);""" for i, m in enumerate(data.metrics): params[f"metric_id_{i}"] = m # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \ From a7804b9cb5506d2f67fd707b74ba0985a3a3b5e1 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 28 Feb 2023 12:38:48 +0100 Subject: [PATCH 158/218] fix(ui) - table card - metricOf value --- frontend/app/mstore/metricStore.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/app/mstore/metricStore.ts b/frontend/app/mstore/metricStore.ts index 9ef49759d..dfdeaf0c9 100644 --- a/frontend/app/mstore/metricStore.ts +++ b/frontend/app/mstore/metricStore.ts @@ -130,6 +130,10 @@ export default class MetricStore { obj['metricValue'] = []; + if (value === TABLE) { + obj['metricOf'] = 'userId'; + } + if (value === TABLE || value === TIMESERIES) { obj['viewType'] = 'table'; } From 3c1d444660379af41e1b31df4254fe5dbb888b60 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 28 Feb 2023 12:46:01 +0100 Subject: [PATCH 159/218] fix(ui) - table card - metricOf value --- frontend/app/mstore/metricStore.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/app/mstore/metricStore.ts b/frontend/app/mstore/metricStore.ts index 9ef49759d..dfdeaf0c9 100644 --- a/frontend/app/mstore/metricStore.ts +++ b/frontend/app/mstore/metricStore.ts @@ -130,6 +130,10 @@ export default class MetricStore { obj['metricValue'] = []; + if (value === TABLE) { + obj['metricOf'] = 'userId'; + } + if (value === TABLE || value === TIMESERIES) { obj['viewType'] = 'table'; } From ea036f0e9b602dc3e0731ac8e5afc71bfb8d6218 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Feb 2023 13:00:42 +0100 Subject: [PATCH 160/218] feat(chalice): fixed funnels --- api/chalicelib/core/significance.py | 12 +++++++----- ee/api/chalicelib/core/significance.py | 12 +++++++----- ee/api/chalicelib/core/significance_exp.py | 12 +++++++----- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 8477cc985..822753be0 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -57,7 +57,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: f_k = f"f_value{i}" values = {**values, 
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]), - value_key=f_k)} + value_key=f_k)} if filter_type == schemas.FilterType.user_browser: # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( @@ -166,7 +166,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: continue values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), - value_key=f"value{i + 1}")} + value_key=f"value{i + 1}")} if sh.is_negation_operator(op) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" @@ -180,7 +180,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s["value"], value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -574,8 +574,10 @@ def get_top_insights(filter_d, project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), drop_only=True) + n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, + first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index 4e02eee41..bb9a53aaa 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: f_k = f"f_value{i}" values = {**values, **sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]), - value_key=f_k)} + value_key=f_k)} if filter_type == schemas.FilterType.user_browser: # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( @@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: continue values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), - value_key=f"value{i + 1}")} + value_key=f"value{i + 1}")} if sh.is_negation_operator(op) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" @@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s["value"], value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), drop_only=True) + n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, + 
first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py index 4e02eee41..bb9a53aaa 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -63,7 +63,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: f_k = f"f_value{i}" values = {**values, **sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]), - value_key=f_k)} + value_key=f_k)} if filter_type == schemas.FilterType.user_browser: # op = sessions.__get_sql_operator_multiple(f["operator"]) first_stage_extra_constraints.append( @@ -172,7 +172,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: continue values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), - value_key=f"value{i + 1}")} + value_key=f"value{i + 1}")} if sh.is_negation_operator(op) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" @@ -186,7 +186,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s["value"], value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -580,8 +580,10 @@ def get_top_insights(filter_d, project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), drop_only=True) + n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, + first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues From 557024bc80ae7540a8c8cc776429904a31316d13 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 28 Feb 2023 12:38:48 +0100 Subject: [PATCH 161/218] fix(ui) - table card - metricOf value --- frontend/app/mstore/metricStore.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/app/mstore/metricStore.ts b/frontend/app/mstore/metricStore.ts index 9ef49759d..dfdeaf0c9 100644 --- a/frontend/app/mstore/metricStore.ts +++ b/frontend/app/mstore/metricStore.ts @@ -130,6 +130,10 @@ export default class MetricStore { obj['metricValue'] = []; + if (value === TABLE) { + obj['metricOf'] = 'userId'; + } + if (value === TABLE || value === TIMESERIES) { obj['viewType'] = 'table'; } From f042f85d75a7acdcbdedf3dc79d53a9368cf5ddb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Feb 2023 13:10:09 +0100 Subject: [PATCH 162/218] feat(chalice): fixed funnels --- api/chalicelib/core/significance.py | 2 +- ee/api/chalicelib/core/significance.py | 2 +- ee/api/chalicelib/core/significance_exp.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 822753be0..937f00af2 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -258,7 +258,7 @@ def pearson_corr(x: list, y: list): return None, None, False if n == 2: - return 
math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0 + return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True xmean = sum(x) / len(x) ymean = sum(y) / len(y) diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index bb9a53aaa..ac6eef548 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -264,7 +264,7 @@ def pearson_corr(x: list, y: list): return None, None, False if n == 2: - return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0 + return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True xmean = sum(x) / len(x) ymean = sum(y) / len(y) diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py index bb9a53aaa..ac6eef548 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -264,7 +264,7 @@ def pearson_corr(x: list, y: list): return None, None, False if n == 2: - return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0 + return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True xmean = sum(x) / len(x) ymean = sum(y) / len(y) From 830de08919dd2c2421c94d9a6ec6c7a627baf976 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 28 Feb 2023 15:36:13 +0100 Subject: [PATCH 163/218] fix(ui) - network list ignore the fetch types from api --- frontend/app/player/web/MessageManager.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 72c07c445..f5989bed2 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -397,7 +397,9 @@ export default class MessageManager { break; case MType.ResourceTiming: // TODO: merge `resource` and `fetch` lists into one here instead of UI - this.lists.lists.resource.insert(getResourceFromResourceTiming(msg, this.sessionStart)) + if (msg.initiator !== ResourceType.FETCH) { + this.lists.lists.resource.insert(getResourceFromResourceTiming(msg, this.sessionStart)) + } break; case MType.Fetch: case MType.NetworkRequest: From bbc46724243e1ad508e93f52f122a5727e77fc95 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 28 Feb 2023 17:58:44 +0100 Subject: [PATCH 164/218] feat(chalice): configurable SP cert for SSO --- ee/api/chalicelib/utils/SAML2_helper.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index e2cd9584f..ba245f1fc 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -8,8 +8,8 @@ from onelogin.saml2.auth import OneLogin_Saml2_Auth from starlette.datastructures import FormData SAML2 = { - "strict": True, - "debug": True, + "strict": config("saml_strict", cast=bool, default=True), + "debug": config("saml_debug", cast=bool, default=True), "sp": { "entityId": config("SITE_URL") + "/api/sso/saml2/metadata/", "assertionConsumerService": { @@ -21,8 +21,8 @@ SAML2 = { "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", - "x509cert": "", - "privateKey": "" + "x509cert": config("sp_x509cert", default=""), + "privateKey": config("sp_key", default="") }, "idp": None } From 9bf8ab432b1ada6fc0cd5c2162210766fd6596c8 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 1 
Mar 2023 10:16:13 +0100 Subject: [PATCH 165/218] fix(ui) - text duplicate --- .../app/components/shared/LiveSessionList/LiveSessionList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index 11987d55b..2a3183270 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -127,7 +127,7 @@ function LiveSessionList(props: Props) {
Assist is the best way to support you users while they're browsing your site, - {' '}through live replay, co-browsing and video conferencing capabilities. Learn More.{' '} + {' '}through live replay, co-browsing and video conferencing capabilities. {' '} Date: Wed, 1 Mar 2023 17:03:03 +0100 Subject: [PATCH 166/218] = Do not expose OpenReplay to indexing --- frontend/app/assets/index.html | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html index 75914f4fb..edb51ad24 100644 --- a/frontend/app/assets/index.html +++ b/frontend/app/assets/index.html @@ -3,6 +3,7 @@ OpenReplay + From c46ef1dc0e3050697f7ea1894a96e53df386d48d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 1 Mar 2023 19:14:25 +0100 Subject: [PATCH 167/218] feat(chalice): changed env-var name --- ee/api/chalicelib/utils/SAML2_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index ba245f1fc..f12f7e795 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -21,7 +21,7 @@ SAML2 = { "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", - "x509cert": config("sp_x509cert", default=""), + "x509cert": config("sp_crt", default=""), "privateKey": config("sp_key", default="") }, "idp": None From e100774e8c7c3ce75e5d24b36f7d864e9af7e9dc Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 1 Mar 2023 21:41:48 +0100 Subject: [PATCH 168/218] fix(helm): protocol prefix for domain --- .../openreplay/charts/alerts/templates/deployment.yaml | 2 +- .../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../openreplay/charts/assist/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/http/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/sink/templates/deployment.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index 58ed2388f..fa8dbf5a8 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -74,7 +74,7 @@ spec: value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index cdfb0a302..2d03a1e56 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" 
.Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index db42a3e0e..a4af9a9f8 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -51,7 +51,7 @@ spec: value: "{{ .Values.global.s3.region }}" - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 0f46b47e0..b8a691938 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 3b940658f..5669aa1e0 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary 
"https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter From 0ad06b5aafa659e71c9bc4fb2be1517d012b5af4 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 1 Mar 2023 21:49:22 +0100 Subject: [PATCH 169/218] fix(helm): protocol prefix for domain (#1000) --- .../openreplay/charts/alerts/templates/deployment.yaml | 2 +- .../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../openreplay/charts/assist/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/http/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/sink/templates/deployment.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml index 58ed2388f..fa8dbf5a8 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/templates/deployment.yaml @@ -74,7 +74,7 @@ spec: value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}' - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index cdfb0a302..2d03a1e56 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index db42a3e0e..a4af9a9f8 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -51,7 +51,7 @@ spec: 
value: "{{ .Values.global.s3.region }}" - name: S3_HOST {{- if contains "minio" .Values.global.s3.endpoint }} - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else}} value: '{{ .Values.global.s3.endpoint }}' {{- end}} diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index 0f46b47e0..b8a691938 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 3b940658f..5669aa1e0 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter From 3e46f0360e178db0bafbc7b54df0afa9ea254ca9 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 1 Mar 2023 22:14:21 +0100 Subject: [PATCH 170/218] fix(helm): rewrite url (#1001) --- .../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/http/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/sink/templates/deployment.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index 2d03a1e56..6c96f33d8 100644 --- 
a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index b8a691938..4072629b0 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 5669aa1e0..45685fbec 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter From 936f916ff6fc868ca00ad2f59ea5261856babd91 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Wed, 1 Mar 2023 22:14:21 +0100 Subject: [PATCH 171/218] fix(helm): rewrite url (#1001) --- 
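Both rewrite-url patches (170 and this identical follow-up) extend the bundled-minio branch of ASSETS_ORIGIN so the rendered origin also carries the assets bucket path. A rough sketch of what the template now evaluates to, in plain Python with hypothetical example values (this helper is illustrative only, not chart code):

    # Hypothetical mirror of the ASSETS_ORIGIN template for the bundled-minio case.
    def assets_origin(secure: bool, domain: str, https_port: int, http_port: int, bucket: str) -> str:
        scheme = "https" if secure else "http"         # ternary "https" "http" .Values.global.ORSecureAccess
        port = https_port if secure else http_port     # ternary ...ports.https ...ports.http .Values.global.ORSecureAccess
        return f"{scheme}://{domain}:{port}/{bucket}"  # new: '/' + .Values.global.s3.assetsBucket

    print(assets_origin(True, "openreplay.example.com", 443, 80, "static"))
    # https://openreplay.example.com:443/static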
.../openreplay/charts/assets/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/http/templates/deployment.yaml | 2 +- .../helmcharts/openreplay/charts/sink/templates/deployment.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml index 2d03a1e56..6c96f33d8 100644 --- a/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assets/templates/deployment.yaml @@ -83,7 +83,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml index b8a691938..4072629b0 100644 --- a/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/http/templates/deployment.yaml @@ -90,7 +90,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter diff --git a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml index 5669aa1e0..45685fbec 100644 --- a/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/sink/templates/deployment.yaml @@ -59,7 +59,7 @@ spec: - name: ASSETS_ORIGIN {{- if contains "minio" .Values.global.s3.endpoint }} # Local minio Installation - value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}' + value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http 
.Values.global.ORSecureAccess }}/{{.Values.global.s3.assetsBucket}}' {{- else if contains "amazonaws.com" .Values.global.s3.endpoint }} # AWS S3 # Ref: https://stackoverflow.com/questions/53634583/go-template-split-string-by-delimiter From 532ddcce93773bc20bca298ab9a3454f8e2593c7 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 2 Mar 2023 12:20:05 +0100 Subject: [PATCH 172/218] fix(cli): prioritize openreplay package path Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay-cli | 2 +- .../helmcharts/openreplay/templates/job.yaml | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index a470051b8..4a1b473d7 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -14,7 +14,7 @@ tmp_dir=$(mktemp -d) [[ -d $OR_DIR ]] || { sudo mkdir $OR_DIR } -export PATH=$PATH:/var/lib/openreplay +export PATH=/var/lib/openreplay:$PATH tools=( zyedidia/eget diff --git a/scripts/helmcharts/openreplay/templates/job.yaml b/scripts/helmcharts/openreplay/templates/job.yaml index 3e0494d7f..bd9fe474a 100644 --- a/scripts/helmcharts/openreplay/templates/job.yaml +++ b/scripts/helmcharts/openreplay/templates/job.yaml @@ -39,6 +39,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: ENTERPRISE_EDITION_LICENSE value: "{{ .Values.global.enterpriseEditionLicense }}" command: @@ -111,6 +115,10 @@ spec: {{- else }} value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.global.env }} - name: {{ $key }} value: '{{ $val }}' @@ -134,6 +142,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: UPGRADE_FRONTENT @@ -168,6 +180,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PGHOST @@ -197,6 +213,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: AWS_ACCESS_KEY_ID value: "{{ .Values.global.s3.accessKey }}" - name: AWS_SECRET_ACCESS_KEY @@ -245,6 +265,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PREVIOUS_APP_VERSION @@ -276,6 +300,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: RETENTION_TIME value: "{{ .Values.global.kafka.retentionTime }}" - name: KAFKA_HOST From e2d4d2ae6bb6edc941e6824a344b1b7c962279ce Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 2 Mar 2023 12:21:21 +0100 Subject: [PATCH 173/218] fix(cli): prioritize openreplay package path (#1002) Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay-cli | 2 +- .../helmcharts/openreplay/templates/job.yaml | 28 +++++++++++++++++++ 
2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index a470051b8..4a1b473d7 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -14,7 +14,7 @@ tmp_dir=$(mktemp -d) [[ -d $OR_DIR ]] || { sudo mkdir $OR_DIR } -export PATH=$PATH:/var/lib/openreplay +export PATH=/var/lib/openreplay:$PATH tools=( zyedidia/eget diff --git a/scripts/helmcharts/openreplay/templates/job.yaml b/scripts/helmcharts/openreplay/templates/job.yaml index 3e0494d7f..bd9fe474a 100644 --- a/scripts/helmcharts/openreplay/templates/job.yaml +++ b/scripts/helmcharts/openreplay/templates/job.yaml @@ -39,6 +39,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: ENTERPRISE_EDITION_LICENSE value: "{{ .Values.global.enterpriseEditionLicense }}" command: @@ -111,6 +115,10 @@ spec: {{- else }} value: '{{ .Values.global.postgresql.postgresqlPassword }}' {{- end}} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} {{- range $key, $val := .Values.global.env }} - name: {{ $key }} value: '{{ $val }}' @@ -134,6 +142,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: UPGRADE_FRONTENT @@ -168,6 +180,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PGHOST @@ -197,6 +213,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: AWS_ACCESS_KEY_ID value: "{{ .Values.global.s3.accessKey }}" - name: AWS_SECRET_ACCESS_KEY @@ -245,6 +265,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: FORCE_MIGRATION value: "{{ .Values.forceMigration }}" - name: PREVIOUS_APP_VERSION @@ -276,6 +300,10 @@ spec: - name: {{ $key }} value: '{{ $val }}' {{- end }} + {{- range $key, $val := .Values.migration.env }} + - name: {{ $key }} + value: '{{ $val }}' + {{- end }} - name: RETENTION_TIME value: "{{ .Values.global.kafka.retentionTime }}" - name: KAFKA_HOST From 86ac9cc499c514ddb06cce7360eb87f9a3d2d0a1 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 2 Mar 2023 12:36:35 +0100 Subject: [PATCH 174/218] chore(helm): Adding default env for migration (#1003) Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/values.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index 3b4f9e985..5168ffabd 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -2,6 +2,9 @@ migrationJob: podAnnotations: linkerd.io/inject: disabled +migration: + env: {} + redis: &redis tls: enabled: false From db2ab0c91e3392c70dff5aac6dd2de5ad33344e7 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 2 Mar 2023 15:32:34 +0100 Subject: [PATCH 175/218] chore(helm): Adding ch backup env vars (#1004) Signed-off-by: rjshrjndrn --- 
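The change below removes the hardcoded API_LISTEN from the ClickHouse StatefulSet and folds it, together with host and port defaults, into the chart's backupEnv map, so every backup setting can be overridden from values. A minimal sketch of the resulting override behaviour (illustrative Python with a hypothetical override, not chart code):

    # Chart defaults from values.yaml; any key can be replaced per install.
    chart_defaults = {
        "API_LISTEN": "0.0.0.0:7171",
        "BACKUPS_TO_KEEP_LOCAL": "1",
        "CLICKHOUSE_HOST": "localhost",
        "CLICKHOUSE_PORT": "9000",
    }
    user_values = {"BACKUPS_TO_KEEP_LOCAL": "3"}  # hypothetical per-install override
    rendered_env = {**chart_defaults, **user_values}
    assert rendered_env["API_LISTEN"] == "0.0.0.0:7171"
    assert rendered_env["BACKUPS_TO_KEEP_LOCAL"] == "3"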
.../databases/charts/clickhouse/templates/statefulset.yaml | 2 -- scripts/helmcharts/databases/charts/clickhouse/values.yaml | 5 ++++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/databases/charts/clickhouse/templates/statefulset.yaml b/scripts/helmcharts/databases/charts/clickhouse/templates/statefulset.yaml index 5fa48fcf8..ebf79cfe2 100644 --- a/scripts/helmcharts/databases/charts/clickhouse/templates/statefulset.yaml +++ b/scripts/helmcharts/databases/charts/clickhouse/templates/statefulset.yaml @@ -33,8 +33,6 @@ spec: value: "{{ .Values.username }}" - name: CLICKHOUSE_PASSWORD value: "{{ .Values.password }}" - - name: API_LISTEN - value: "0.0.0.0:7171" {{- range $key, $value := .Values.backupEnv }} - name: "{{ $key }}" value: "{{ $value }}" diff --git a/scripts/helmcharts/databases/charts/clickhouse/values.yaml b/scripts/helmcharts/databases/charts/clickhouse/values.yaml index e87942fb5..f3f39aefd 100644 --- a/scripts/helmcharts/databases/charts/clickhouse/values.yaml +++ b/scripts/helmcharts/databases/charts/clickhouse/values.yaml @@ -46,7 +46,10 @@ securityContext: {} env: {} backupEnv: - BACKUPS_TO_KEEP_LOCAL: "2" + API_LISTEN: "0.0.0.0:7171" + BACKUPS_TO_KEEP_LOCAL: "1" + CLICKHOUSE_HOST: "localhost" + CLICKHOUSE_PORT: "9000" # REMOTE_STORAGE=s3 # S3_ACCESS_KEY=key # S3_SECRET_KEY=password From 598eafad8b0e9d84ed1d8d79eb4b7414ff08ef00 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 3 Mar 2023 09:30:31 +0100 Subject: [PATCH 176/218] fix(ui) - xhr request duplicate --- frontend/app/player/web/MessageManager.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index f5989bed2..330d1c861 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -397,7 +397,7 @@ export default class MessageManager { break; case MType.ResourceTiming: // TODO: merge `resource` and `fetch` lists into one here instead of UI - if (msg.initiator !== ResourceType.FETCH) { + if (msg.initiator !== ResourceType.FETCH && msg.initiator !== ResourceType.XHR) { this.lists.lists.resource.insert(getResourceFromResourceTiming(msg, this.sessionStart)) } break; From 270b150f8841d972ec6f509694d58922bf35a185 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 3 Mar 2023 14:24:07 +0100 Subject: [PATCH 177/218] fix(ui) - graphql crash --- .../components/Session_/GraphQL/GraphQL.tsx | 39 +++++++++---------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/frontend/app/components/Session_/GraphQL/GraphQL.tsx b/frontend/app/components/Session_/GraphQL/GraphQL.tsx index 6f89a5ec1..11f46d318 100644 --- a/frontend/app/components/Session_/GraphQL/GraphQL.tsx +++ b/frontend/app/components/Session_/GraphQL/GraphQL.tsx @@ -12,29 +12,8 @@ function renderDefaultStatus() { return '2xx-3xx'; } -export function renderName(r: Record) { - const { player } = React.useContext(PlayerContext); - - return ( -
-
-        {r.operationName}
- -
- ); -} - function GraphQL() { const { player, store } = React.useContext(PlayerContext); - const { graphqlList: list, graphqlListNow: listNow, time, livePlay } = store.get(); const defaultState = { @@ -52,6 +31,24 @@ function GraphQL() { const [state, setState] = React.useState(defaultState); + function renderName(r: Record) { + return ( +
+
+        {r.operationName}
+ +
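{/* Hooks such as useContext may only be called from inside a function
    component or a custom hook. The old module-level renderName invoked
    useContext on every call from the list renderer, an invalid hook call
    that crashed this panel; declared inside GraphQL(), it can instead close
    over values like `player` that the component itself already reads from
    PlayerContext. */}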
+ ); + } + const filterList = (list: any, value: string) => { const filterRE = getRE(value, 'i'); From d8212351552f3547bbd894feedd5050f163c582b Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 3 Mar 2023 17:03:06 +0100 Subject: [PATCH 178/218] fix(tracker-assist):5.0.1:check presense of window before using it at the top level --- tracker/tracker-assist/package.json | 2 +- tracker/tracker-assist/src/RemoteControl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json index aaa80429d..4277e2e36 100644 --- a/tracker/tracker-assist/package.json +++ b/tracker/tracker-assist/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-assist", "description": "Tracker plugin for screen assistance through the WebRTC", - "version": "5.0.0", + "version": "5.0.1", "keywords": [ "WebRTC", "assistance", diff --git a/tracker/tracker-assist/src/RemoteControl.ts b/tracker/tracker-assist/src/RemoteControl.ts index fb9850437..017918238 100644 --- a/tracker/tracker-assist/src/RemoteControl.ts +++ b/tracker/tracker-assist/src/RemoteControl.ts @@ -11,7 +11,7 @@ export enum RCStatus { let setInputValue = function(this: HTMLInputElement | HTMLTextAreaElement, value: string) { this.value = value } -const nativeInputValueDescriptor = Object.getOwnPropertyDescriptor(window.HTMLInputElement.prototype, 'value') +const nativeInputValueDescriptor = typeof window !== 'undefined' && Object.getOwnPropertyDescriptor(window.HTMLInputElement.prototype, 'value') if (nativeInputValueDescriptor && nativeInputValueDescriptor.set) { setInputValue = nativeInputValueDescriptor.set } From e1e139bfaaf6009ad2c4eda48b8f04fd25648e00 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 3 Mar 2023 18:21:32 +0100 Subject: [PATCH 179/218] feat(chalice): fixed create&add card to dashboard at the same time --- api/routers/core.py | 103 ++++++++++++++++++++------------------------ 1 file changed, 47 insertions(+), 56 deletions(-) diff --git a/api/routers/core.py b/api/routers/core.py index 67e33a50a..a23e437ea 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -63,25 +63,25 @@ async def logout_user(response: Response, context: schemas.CurrentContext = Depe @app.post('/{projectId}/sessions/search', tags=["sessions"]) async def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id) return {'data': data} @app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) async def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True) return {'data': data} @app.get('/{projectId}/events/search', tags=["events"]) async def events_search(projectId: int, q: str, - type: Union[schemas.FilterType, schemas.EventType, - schemas.PerformanceEventType, schemas.FetchFilterType, - schemas.GraphqlFilterType, str] = None, - key: str = None, source: str = None, live: bool = False, - context: schemas.CurrentContext = Depends(OR_context)): + type: Union[schemas.FilterType, schemas.EventType, + schemas.PerformanceEventType, schemas.FetchFilterType, + 
schemas.GraphqlFilterType, str] = None, + key: str = None, source: str = None, live: bool = False, + context: schemas.CurrentContext = Depends(OR_context)): if len(q) == 0: return {"data": []} if live: @@ -117,8 +117,8 @@ async def get_integrations_status(projectId: int, context: schemas.CurrentContex @app.post('/{projectId}/integrations/{integration}/notify/{webhookId}/{source}/{sourceId}', tags=["integrations"]) async def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str, - data: schemas.IntegrationNotificationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + data: schemas.IntegrationNotificationSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): comment = None if data.comment: comment = data.comment @@ -151,7 +151,7 @@ async def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(O @app.post('/{projectId}/integrations/sentry', tags=["integrations"]) async def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -177,7 +177,7 @@ async def get_datadog(projectId: int, context: schemas.CurrentContext = Depends( @app.post('/{projectId}/integrations/datadog', tags=["integrations"]) async def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -198,7 +198,7 @@ async def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depe @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) async def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -219,7 +219,7 @@ async def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends @app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) async def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -240,7 +240,7 @@ async def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends( @app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) async def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -251,7 +251,7 @@ async def delete_datadog(projectId: int, context: schemas.CurrentContext = Depen @app.post('/integrations/bugsnag/list_projects', tags=["integrations"]) async def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): 
+ context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorizationToken)} @@ -267,7 +267,7 @@ async def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends( @app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) async def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -278,7 +278,7 @@ async def delete_bugsnag(projectId: int, context: schemas.CurrentContext = Depen @app.post('/integrations/cloudwatch/list_groups', tags=["integrations"]) async def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, aws_secret_access_key=data.awsSecretAccessKey, region=data.region)} @@ -296,7 +296,7 @@ async def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depen @app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) async def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -317,13 +317,13 @@ async def get_elasticsearch(projectId: int, context: schemas.CurrentContext = De @app.post('/integrations/elasticsearch/test', tags=["integrations"]) async def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, **data.dict())} @app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) async def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return { "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -345,7 +345,7 @@ async def get_sumologic(projectId: int, context: schemas.CurrentContext = Depend @app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) async def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} @@ -385,7 +385,7 @@ async def get_integration_status_github(context: schemas.CurrentContext = Depend @app.post('/integrations/jira', tags=["integrations"]) async def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if not data.url.endswith('atlassian.net'): return {"errors": ["url must be a valid JIRA URL (example.atlassian.net)"]} error, integration = 
integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, @@ -398,7 +398,7 @@ async def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), @app.post('/integrations/github', tags=["integrations"]) async def add_edit_github(data: schemas.GithubSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, tenant_id=context.tenant_id, user_id=context.user_id) @@ -472,8 +472,8 @@ async def get_all_assignments(projectId: int, context: schemas.CurrentContext = @app.post('/{projectId}/sessions/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) async def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId, - data: schemas.AssignmentSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + data: schemas.AssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.create_new_assignment(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, creator_id=context.user_id, assignee=data.assignee, @@ -494,7 +494,7 @@ async def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_ @app.post('/{projectId}/gdpr', tags=["projects", "gdpr"]) async def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): result = projects.edit_gdpr(project_id=projectId, gdpr=data.dict()) if "errors" in result: return result @@ -515,19 +515,19 @@ async def get_metadata(projectId: int, context: schemas.CurrentContext = Depends @app.post('/{projectId}/metadata/list', tags=["metadata"]) async def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list) @app.post('/{projectId}/metadata', tags=["metadata"]) async def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key) @app.post('/{projectId}/metadata/{index}', tags=["metadata"]) async def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index, new_name=data.key) @@ -560,7 +560,7 @@ async def get_capture_status(projectId: int, context: schemas.CurrentContext = D @app.post('/{projectId}/sample_rate', tags=["projects"]) async def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())} @@ -581,7 +581,7 @@ async def errors_merge(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/{projectId}/alerts', tags=["alerts"]) async def create_alert(projectId: int, data: 
schemas.AlertSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return alerts.create(project_id=projectId, data=data) @@ -603,7 +603,7 @@ async def get_alert(projectId: int, alertId: int, context: schemas.CurrentContex @app.post('/{projectId}/alerts/{alertId}', tags=["alerts"]) async def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return alerts.update(id=alertId, data=data) @@ -615,7 +615,7 @@ async def delete_alert(projectId: int, alertId: int, context: schemas.CurrentCon @app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"]) @app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"]) async def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): project_id = projects.get_internal_project_id(projectKey) if project_id is None: return {"errors": ["Project not found."]} @@ -630,7 +630,7 @@ async def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_ @app.post('/config/weekly_report', tags=["weekly report config"]) async def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)} @@ -652,14 +652,14 @@ async def get_sessions_live(projectId: int, userId: str = None, context: schemas @app.post('/{projectId}/assist/sessions', tags=["assist"]) async def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_sessions_ws(projectId, body=data) return {'data': data} @app.post('/{projectId}/mobile/{sessionId}/urls', tags=['mobile']) async def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)} @@ -671,7 +671,7 @@ async def signup_handler(data: schemas.UserSignupSchema = Body(...)): @app.post('/projects', tags=['projects']) async def create_project(data: schemas.CreateProjectSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) @@ -686,7 +686,7 @@ async def get_project(projectId: int, context: schemas.CurrentContext = Depends( @app.put('/projects/{projectId}', tags=['projects']) async def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId) @@ -705,7 +705,7 @@ async def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR @app.post('/client', tags=['client']) @app.put('/client', 
tags=['client']) async def edit_client(data: schemas.UpdateTenantSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data) @@ -726,7 +726,7 @@ async def view_notifications(notificationId: int, context: schemas.CurrentContex @app.post('/notifications/view', tags=['notifications']) async def batch_view_notifications(data: schemas.NotificationsViewSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": notifications.view_notification(notification_ids=data.ids, startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp, @@ -776,7 +776,7 @@ async def delete_slack_integration(webhookId: int, context: schemas.CurrentConte @app.put('/webhooks', tags=["webhooks"]) async def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)} @@ -812,7 +812,7 @@ async def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_c @app.post('/account/password', tags=["account"]) async def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return users.change_password(email=context.email, old_password=data.old_password, new_password=data.new_password, tenant_id=context.tenant_id, user_id=context.user_id) @@ -820,7 +820,7 @@ async def change_client_password(data: schemas.EditUserPasswordSchema = Body(... 
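# Besides the indentation pass running through this patch (continuation
# parameters realigned under the opening parenthesis, no behavioural change),
# the hunks below drop the private /general_stats endpoint and reduce the
# health check to a bare GET on the root route returning an empty body,
# removing its POST/PUT/DELETE aliases and the version/cron details it
# previously reported.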
@app.post('/{projectId}/saved_search', tags=["savedSearch"]) async def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return saved_search.create(project_id=projectId, user_id=context.user_id, data=data) @@ -836,7 +836,7 @@ async def get_saved_search(projectId: int, search_id: int, context: schemas.Curr @app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"]) async def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)} @@ -862,7 +862,7 @@ async def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_cont @app.post('/integrations/msteams', tags=['integrations']) async def add_msteams_integration(data: schemas.AddCollaborationSchema, - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): n = MSTeams.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -874,7 +874,7 @@ async def add_msteams_integration(data: schemas.AddCollaborationSchema, @app.post('/integrations/msteams/{webhookId}', tags=['integrations']) async def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): if len(data.url) > 0: old = MSTeams.get_integration(tenant_id=context.tenant_id, integration_id=webhookId) if not old: @@ -894,15 +894,6 @@ async def delete_msteams_integration(webhookId: int, context: schemas.CurrentCon return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId) -@public_app.get('/general_stats', tags=["private"], include_in_schema=False) -async def get_general_stats(): - return {"data": {"sessions:": sessions.count_all()}} - - @public_app.get('/', tags=["health"]) -@public_app.post('/', tags=["health"]) -@public_app.put('/', tags=["health"]) -@public_app.delete('/', tags=["health"]) async def health_check(): - return {"data": {"stage": f"live {config('version_number', default='')}", - "internalCrons": config("LOCAL_CRONS", default=False, cast=bool)}} + return {} From 740fc4aa2e06972c6b2754811a9324e56e41f871 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Sun, 5 Mar 2023 15:39:00 +0100 Subject: [PATCH 180/218] fix(script): Race condition for migration version Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 6402a3c88..8ac336d21 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -39,10 +39,10 @@ function migration() { echo "$@" | awk -F. 
'{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }' } all_versions=(`ls -l db/init_dbs/$db | grep -E ^d | grep -v create | awk '{print $NF}'`) - migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done`) + migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done | sort -V`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. So joining them with , - joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") + joined_migration_versions=$(IFS=, ; printf '%s\n' "${migration_versions[@]}" | tac | tr '\n' ' '; echo) cd - From 3639a9f1051fcc83908344bd4eef61de64f5ec80 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Sun, 5 Mar 2023 15:40:49 +0100 Subject: [PATCH 181/218] fix(script): Race condition for migration version (#1010) Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 6402a3c88..8ac336d21 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -39,10 +39,10 @@ function migration() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }' } all_versions=(`ls -l db/init_dbs/$db | grep -E ^d | grep -v create | awk '{print $NF}'`) - migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done`) + migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done | sort -V`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. So joining them with , - joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") + joined_migration_versions=$(IFS=, ; printf '%s\n' "${migration_versions[@]}" | tac | tr '\n' ' '; echo) cd - From 0037b0376fbba20f3a3055704e5b4cc29e905851 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Sun, 5 Mar 2023 15:47:29 +0100 Subject: [PATCH 182/218] fix(migration): regression fix Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 8ac336d21..0df923178 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -42,7 +42,7 @@ function migration() { migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done | sort -V`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. 
So joining them with , - joined_migration_versions=$(IFS=, ; printf '%s\n' "${migration_versions[@]}" | tac | tr '\n' ' '; echo) + joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") cd - From 849aab89a5003a6dee7fe0b0e9ea4c2c4f05193d Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Sun, 5 Mar 2023 16:16:20 +0100 Subject: [PATCH 183/218] fix(migration): regression (#1011) fix Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/dbops.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/files/dbops.sh b/scripts/helmcharts/openreplay/files/dbops.sh index 8ac336d21..0df923178 100644 --- a/scripts/helmcharts/openreplay/files/dbops.sh +++ b/scripts/helmcharts/openreplay/files/dbops.sh @@ -42,7 +42,7 @@ function migration() { migration_versions=(`for ver in ${all_versions[*]}; do if [[ $(normalise_version $ver) > $(normalise_version "${PREVIOUS_APP_VERSION}") ]]; then echo $ver; fi; done | sort -V`) echo "Migration version: ${migration_versions[*]}" # Can't pass the space seperated array to ansible for migration. So joining them with , - joined_migration_versions=$(IFS=, ; printf '%s\n' "${migration_versions[@]}" | tac | tr '\n' ' '; echo) + joined_migration_versions=$(IFS=, ; echo "${migration_versions[*]}") cd - From 82cb2e6ad99050586fb047d1f8a08bb82ffdef3f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Sun, 5 Mar 2023 19:29:30 +0100 Subject: [PATCH 184/218] feat(chalice): fixed funnels --- api/chalicelib/core/significance.py | 8 ++++---- ee/api/chalicelib/core/significance.py | 8 ++++---- ee/api/chalicelib/core/significance_exp.py | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 937f00af2..64028a8df 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -574,10 +574,10 @@ def get_top_insights(filter_d, project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, - first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), - drop_only=True) + total_drop_due_to_issues = get_issues(stages, rows, + first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index ac6eef548..ae1f0c867 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -580,10 +580,10 @@ def get_top_insights(filter_d, project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, - first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), - drop_only=True) + total_drop_due_to_issues = get_issues(stages, rows, + first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py index ac6eef548..ae1f0c867 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -580,10 +580,10 @@ def get_top_insights(filter_d, 
project_id): # Obtain the first part of the output stages_list = get_stages(stages, rows) # Obtain the second part of the output - n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, - first_stage=filter_d.get("firstStage"), - last_stage=filter_d.get("lastStage"), - drop_only=True) + total_drop_due_to_issues = get_issues(stages, rows, + first_stage=filter_d.get("firstStage"), + last_stage=filter_d.get("lastStage"), + drop_only=True) return stages_list, total_drop_due_to_issues From eb21604af428304eeef35644ef2032572db7867b Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 6 Mar 2023 09:45:39 +0100 Subject: [PATCH 185/218] chore(docker): Adding env variable for max file size Signed-off-by: rjshrjndrn --- backend/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index c7606559e..749900ba5 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -82,7 +82,9 @@ ENV TZ=UTC \ COMPRESSION_TYPE=zstd \ CH_USERNAME="default" \ CH_PASSWORD="" \ - CH_DATABASE="default" + CH_DATABASE="default" \ + # Max file size to process, default to 100MB + MAX_FILE_SIZE=100000000 RUN if [ "$SERVICE_NAME" = "http" ]; then \ From 39a0925abf979d323770e5999d6b188e4473241d Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 6 Mar 2023 10:59:08 +0100 Subject: [PATCH 186/218] fix(ui) - jwt set --- frontend/app/components/Login/Login.js | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index da2206bdf..b51970036 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -40,7 +40,6 @@ class Login extends React.Component { const jwt = params.get('jwt'); if (jwt) { this.props.setJwt(jwt); - window.location.href = '/'; } } From ac810bc3e41f8c7f8fdd123f3078367930acb413 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 6 Mar 2023 10:59:08 +0100 Subject: [PATCH 187/218] fix(ui) - jwt set --- frontend/app/components/Login/Login.js | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index da2206bdf..b51970036 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -40,7 +40,6 @@ class Login extends React.Component { const jwt = params.get('jwt'); if (jwt) { this.props.setJwt(jwt); - window.location.href = '/'; } } From a74bfa1b61d116c497f17cd32a12a13eb0692e7a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 6 Mar 2023 14:32:34 +0100 Subject: [PATCH 188/218] chore(actions): change actions --- .github/workflows/alerts-ee.yaml | 3 ++- .github/workflows/alerts.yaml | 3 ++- .github/workflows/api-ee.yaml | 3 ++- .github/workflows/api.yaml | 3 ++- .github/workflows/assist-ee.yaml | 1 + .github/workflows/assist.yaml | 1 + .github/workflows/crons-ee.yaml | 3 ++- .github/workflows/peers-ee.yaml | 1 + .github/workflows/peers.yaml | 1 + .github/workflows/sourcemaps-reader.yaml | 1 + 10 files changed, 15 insertions(+), 5 deletions(-) diff --git a/.github/workflows/alerts-ee.yaml b/.github/workflows/alerts-ee.yaml index 10482a7cb..4c1d3b6c4 100644 --- a/.github/workflows/alerts-ee.yaml +++ b/.github/workflows/alerts-ee.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "ee/api/**" - "api/**" diff --git a/.github/workflows/alerts.yaml b/.github/workflows/alerts.yaml index 539cc5e65..a24f2b855 100644 --- a/.github/workflows/alerts.yaml 
+++ b/.github/workflows/alerts.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "api/**" - "!api/.gitignore" diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index b2a31f276..f9a1730f1 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "ee/api/**" - "api/**" diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 26d59ff87..8e2f7fa7b 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "api/**" - "!api/.gitignore" diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index 76dcc4a2d..e3f03ef5f 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -4,6 +4,7 @@ on: push: branches: - dev + - api-* paths: - "ee/utilities/**" - "utilities/**" diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index 65ca0348c..03ee1df5f 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -4,6 +4,7 @@ on: push: branches: - dev + - api-* paths: - "utilities/**" - "!utilities/.gitignore" diff --git a/.github/workflows/crons-ee.yaml b/.github/workflows/crons-ee.yaml index 762dae33e..77c098e4e 100644 --- a/.github/workflows/crons-ee.yaml +++ b/.github/workflows/crons-ee.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "ee/api/**" - "api/**" diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml index 5db7436da..dcd003e93 100644 --- a/.github/workflows/peers-ee.yaml +++ b/.github/workflows/peers-ee.yaml @@ -4,6 +4,7 @@ on: push: branches: - dev + - api-* paths: - "ee/peers/**" - "peers/**" diff --git a/.github/workflows/peers.yaml b/.github/workflows/peers.yaml index 7b2a715d8..2de0ae3ed 100644 --- a/.github/workflows/peers.yaml +++ b/.github/workflows/peers.yaml @@ -4,6 +4,7 @@ on: push: branches: - dev + - api-* paths: - "peers/**" - "!peers/.gitignore" diff --git a/.github/workflows/sourcemaps-reader.yaml b/.github/workflows/sourcemaps-reader.yaml index 095a70784..f0059da40 100644 --- a/.github/workflows/sourcemaps-reader.yaml +++ b/.github/workflows/sourcemaps-reader.yaml @@ -4,6 +4,7 @@ on: push: branches: - dev + - api-* paths: - "sourcemap-reader/**" - "!sourcemap-reader/.gitignore" From 850ee04787a827b856f99f776082006767c2492c Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 6 Mar 2023 12:26:04 +0100 Subject: [PATCH 189/218] change(player): decode state messages only on render --- .../components/Session_/Storage/Storage.tsx | 54 +++++++++++++++---- frontend/app/player/web/MessageManager.ts | 39 +++++--------- frontend/app/player/web/WebPlayer.ts | 6 +++ 3 files changed, 63 insertions(+), 36 deletions(-) diff --git a/frontend/app/components/Session_/Storage/Storage.tsx b/frontend/app/components/Session_/Storage/Storage.tsx index f955912e7..587d5bc31 100644 --- a/frontend/app/components/Session_/Storage/Storage.tsx +++ b/frontend/app/components/Session_/Storage/Storage.tsx @@ -12,6 +12,8 @@ import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; import cn from 'classnames'; import stl from './storage.module.css'; +import logger from "App/logger"; +import { toJS } from 'mobx' function getActionsName(type: string) { switch (type) { @@ -23,10 +25,19 @@ function 
getActionsName(type: string) { } } +const storageDecodeKeys = { + [STORAGE_TYPES.REDUX]: ['state', 'action'], + [STORAGE_TYPES.NGRX]: ['state', 'action'], + [STORAGE_TYPES.VUEX]: ['state', 'mutation'], + [STORAGE_TYPES.ZUSTAND]: ['state', 'mutation'], + [STORAGE_TYPES.MOBX]: ['payload'], + [STORAGE_TYPES.NONE]: ['state, action', 'payload', 'mutation'], +} interface Props { hideHint: (args: string) => void; hintIsHidden: boolean; } + function Storage(props: Props) { const lastBtnRef = React.useRef(); const [showDiffs, setShowDiffs] = React.useState(false); @@ -37,6 +48,24 @@ function Storage(props: Props) { const list = selectStorageList(state); const type = selectStorageType(state); + const decodeMessage = (msg: any) => { + const decoded = {}; + const pureMSG = toJS(msg) + const keys = storageDecodeKeys[type]; + try { + keys.forEach(key => { + if (pureMSG[key]) { + // @ts-ignore TODO: types for decoder + decoded[key] = player.decodeMessage(pureMSG[key]); + } + }); + } catch (e) { + logger.error("Error on message decoding: ", e, pureMSG); + return null; + } + return { ...pureMSG, ...decoded }; + } + const focusNextButton = () => { if (lastBtnRef.current) { lastBtnRef.current.focus(); @@ -106,27 +135,30 @@ function Storage(props: Props) { player.jump(list[listNow.length].time); }; - const renderItem = (item: Record, i: number, prevItem: Record) => { + const renderItem = (item: Record, i: number, prevItem?: Record) => { let src; let name; + const prevItemD = prevItem ? decodeMessage(prevItem) : undefined + const itemD = decodeMessage(item) + switch (type) { case STORAGE_TYPES.REDUX: case STORAGE_TYPES.NGRX: - src = item.action; + src = itemD.action; name = src && src.type; break; case STORAGE_TYPES.VUEX: - src = item.mutation; + src = itemD.mutation; name = src && src.type; break; case STORAGE_TYPES.MOBX: - src = item.payload; + src = itemD.payload; name = `@${item.type} ${src && src.type}`; break; case STORAGE_TYPES.ZUSTAND: src = null; - name = item.mutation.join(''); + name = itemD.mutation.join(''); } if (src !== null && !showDiffs) { @@ -144,7 +176,7 @@ function Storage(props: Props) {
          ) : (
            <>
-             {renderDiff(item, prevItem)}
+             {renderDiff(itemD, prevItemD)}
              {typeof item.duration === 'number' && (
-               {formatMs(item.duration)}
+               {formatMs(itemD.duration)}
              )}
        {i + 1 < listNow.length && (
-
        )}
@@ -281,7 +313,11 @@ function Storage(props: Props) {
        {'Empty state.'}
        ) : (
-
+
        )}
)} diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 330d1c861..dced92641 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -107,7 +107,7 @@ export default class MessageManager { private scrollManager: ListWalker = new ListWalker(); - private readonly decoder = new Decoder(); + public readonly decoder = new Decoder(); private readonly lists: Lists; private activityManager: ActivityManager | null = null; @@ -406,39 +406,24 @@ export default class MessageManager { this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: - decoded = this.decodeStateMessage(msg, ["state", "action"]); - logger.log('redux', decoded) - if (decoded != null) { - this.lists.lists.redux.append(decoded); - } + logger.log('redux', msg) + this.lists.lists.redux.append(msg); break; case MType.NgRx: - decoded = this.decodeStateMessage(msg, ["state", "action"]); - logger.log('ngrx', decoded) - if (decoded != null) { - this.lists.lists.ngrx.append(decoded); - } + logger.log('ngrx', msg) + this.lists.lists.ngrx.append(msg); break; case MType.Vuex: - decoded = this.decodeStateMessage(msg, ["state", "mutation"]); - logger.log('vuex', decoded) - if (decoded != null) { - this.lists.lists.vuex.append(decoded); - } + logger.log('vuex', msg) + this.lists.lists.vuex.append(msg); break; case MType.Zustand: - decoded = this.decodeStateMessage(msg, ["state", "mutation"]) - logger.log('zustand', decoded) - if (decoded != null) { - this.lists.lists.zustand.append(decoded) - } + logger.log('zustand', msg) + this.lists.lists.zustand.append(msg) + break case MType.MobX: - decoded = this.decodeStateMessage(msg, ["payload"]); - logger.log('mobx', decoded) - - if (decoded != null) { - this.lists.lists.mobx.append(decoded); - } + logger.log('mobx', msg) + this.lists.lists.mobx.append(msg); break; case MType.GraphQl: this.lists.lists.graphql.append(msg); diff --git a/frontend/app/player/web/WebPlayer.ts b/frontend/app/player/web/WebPlayer.ts index d1a56f9fd..0889089ee 100644 --- a/frontend/app/player/web/WebPlayer.ts +++ b/frontend/app/player/web/WebPlayer.ts @@ -7,6 +7,7 @@ import MessageManager from './MessageManager' import InspectorController from './addons/InspectorController' import TargetMarker from './addons/TargetMarker' import Screen, { ScaleMode } from './Screen/Screen' +import { Message } from "Player/web/messages"; // export type State = typeof WebPlayer.INITIAL_STATE @@ -83,6 +84,11 @@ export default class WebPlayer extends Player { this.targetMarker.updateMarkedTargets() } + // delayed message decoding for state plugins + decodeMessage = (msg: Message) => { + return this.messageManager.decoder.decode(msg) + } + // Inspector & marker mark(e: Element) { this.inspectorController.marker?.mark(e) From ef83de38a2ed4e3b364ba29b85b0257465ae59f1 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 6 Mar 2023 12:40:45 +0100 Subject: [PATCH 190/218] change(player): use shallow object spread instead of recursive tojs --- frontend/app/components/Session_/Storage/Storage.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Session_/Storage/Storage.tsx b/frontend/app/components/Session_/Storage/Storage.tsx index 587d5bc31..25842840f 100644 --- a/frontend/app/components/Session_/Storage/Storage.tsx +++ b/frontend/app/components/Session_/Storage/Storage.tsx @@ -50,7 +50,7 @@ function Storage(props: Props) { const decodeMessage = (msg: any) => 
{ const decoded = {}; - const pureMSG = toJS(msg) + const pureMSG = { ...msg } const keys = storageDecodeKeys[type]; try { keys.forEach(key => { From 65a66b9c59a8dfcc48fa0cae1e0b3536c15bcde8 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 6 Mar 2023 14:26:59 +0100 Subject: [PATCH 191/218] change(player): small bug fixes --- .../app/components/Session/Player/ReplayPlayer/PlayerInst.tsx | 3 --- frontend/app/components/Session/WebPlayer.tsx | 3 ++- frontend/app/components/Session_/Storage/Storage.tsx | 1 - 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/frontend/app/components/Session/Player/ReplayPlayer/PlayerInst.tsx b/frontend/app/components/Session/Player/ReplayPlayer/PlayerInst.tsx index 6a300cce7..097a57694 100644 --- a/frontend/app/components/Session/Player/ReplayPlayer/PlayerInst.tsx +++ b/frontend/app/components/Session/Player/ReplayPlayer/PlayerInst.tsx @@ -68,9 +68,6 @@ function Player(props: IProps) { playerContext.player.attach(parentElement); setAttached(true) } - if (isAttached && isReady) { - playerContext.player.play(); - } }, [isReady]); React.useEffect(() => { diff --git a/frontend/app/components/Session/WebPlayer.tsx b/frontend/app/components/Session/WebPlayer.tsx index 383b70c63..659dbe540 100644 --- a/frontend/app/components/Session/WebPlayer.tsx +++ b/frontend/app/components/Session/WebPlayer.tsx @@ -70,7 +70,8 @@ function WebPlayer(props: any) { if (showNoteModal) { contextValue.player.pause() } - if (activeTab !== 'Click Map' && !showNoteModal && isPlayerReady) { + + if (activeTab === '' && !showNoteModal && isPlayerReady) { contextValue.player && contextValue.player.play() } }, [activeTab, isPlayerReady, showNoteModal]) diff --git a/frontend/app/components/Session_/Storage/Storage.tsx b/frontend/app/components/Session_/Storage/Storage.tsx index 25842840f..7cabca865 100644 --- a/frontend/app/components/Session_/Storage/Storage.tsx +++ b/frontend/app/components/Session_/Storage/Storage.tsx @@ -13,7 +13,6 @@ import DiffRow from './DiffRow'; import cn from 'classnames'; import stl from './storage.module.css'; import logger from "App/logger"; -import { toJS } from 'mobx' function getActionsName(type: string) { switch (type) { From 1cd1f7231049e52681c631ed3c706f635e04d778 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 6 Mar 2023 15:26:02 +0100 Subject: [PATCH 192/218] change(player): only recompute list on change --- .../components/Session_/Storage/Storage.tsx | 58 +++++++++++++++++-- frontend/app/player/web/MessageManager.ts | 4 ++ frontend/app/player/web/WebPlayer.ts | 2 +- 3 files changed, 59 insertions(+), 5 deletions(-) diff --git a/frontend/app/components/Session_/Storage/Storage.tsx b/frontend/app/components/Session_/Storage/Storage.tsx index 7cabca865..8f7f3e92e 100644 --- a/frontend/app/components/Session_/Storage/Storage.tsx +++ b/frontend/app/components/Session_/Storage/Storage.tsx @@ -65,6 +65,12 @@ function Storage(props: Props) { return { ...pureMSG, ...decoded }; } + const decodedList = React.useMemo(() => { + return listNow.map(msg => { + return decodeMessage(msg) + }) + }, [listNow.length]) + const focusNextButton = () => { if (lastBtnRef.current) { lastBtnRef.current.focus(); @@ -138,8 +144,8 @@ function Storage(props: Props) { let src; let name; - const prevItemD = prevItem ? decodeMessage(prevItem) : undefined - const itemD = decodeMessage(item) + const itemD = item + const prevItemD = prevItem ? prevItem : undefined switch (type) { case STORAGE_TYPES.REDUX: @@ -322,8 +328,8 @@ function Storage(props: Props) { )}
- {listNow.map((item: Record, i: number) => - renderItem(item, i, i > 0 ? listNow[i - 1] : undefined) + {decodedList.map((item: Record, i: number) => + renderItem(item, i, i > 0 ? decodedList[i - 1] : undefined) )}
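Note: keying the memo on listNow.length, as in the hunk above, re-decodes the whole list whenever a new message arrives. The WIP comment added in the hunk below sketches a diff-based alternative; a minimal version of that idea (hypothetical helper, not part of this patch, assuming the player's lists are append-only) could look like:

    // Hypothetical incremental decoder: keep previously decoded messages and
    // decode only the tail appended since the last call.
    function makeIncrementalDecoder(decode: (msg: unknown) => unknown) {
      let cache: unknown[] = []
      return (list: unknown[]): unknown[] => {
        if (list.length < cache.length) cache = [] // list was reset, start over
        for (let i = cache.length; i < list.length; i++) {
          cache.push(decode(list[i]))
        }
        return cache
      }
    }
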
@@ -341,3 +347,47 @@ export default connect( hideHint, } )(observer(Storage)); + + +/** + * TODO: compute diff and only decode the required parts + * WIP example + * function useStorageDecryptedList(list: Record[], type: string, player: IWebPlayer) { + * const [decryptedList, setDecryptedList] = React.useState(list); + * const [listLength, setLength] = React.useState(list.length) + * + * const decodeMessage = (msg: any, type: StorageType) => { + * const decoded = {}; + * const pureMSG = { ...msg } + * const keys = storageDecodeKeys[type]; + * try { + * keys.forEach(key => { + * if (pureMSG[key]) { + * // @ts-ignore TODO: types for decoder + * decoded[key] = player.decodeMessage(pureMSG[key]); + * } + * }); + * } catch (e) { + * logger.error("Error on message decoding: ", e, pureMSG); + * return null; + * } + * return { ...pureMSG, ...decoded }; + * } + * + * React.useEffect(() => { + * if (list.length !== listLength) { + * const last = list[list.length - 1]._index; + * let diff; + * if (last < decryptedList[decryptedList.length - 1]._index) { + * + * } + * diff = list.filter(item => !decryptedList.includes(i => i._index === item._index)) + * const decryptedDiff = diff.map(item => { + * return player.decodeMessage(item) + * }) + * const result = + * } + * }, [list.length]) + * } + * + * */ \ No newline at end of file diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index dced92641..0d9aaee3d 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -463,6 +463,10 @@ export default class MessageManager { this.state.update({ messagesLoading, ready: !messagesLoading && !this.state.get().cssLoading }); } + decodeMessage(msg: Message) { + return this.decoder.decode(msg) + } + private setSize({ height, width }: { height: number, width: number }) { this.screen.scale({ height, width }); this.state.update({ width, height }); diff --git a/frontend/app/player/web/WebPlayer.ts b/frontend/app/player/web/WebPlayer.ts index 0889089ee..9ca769598 100644 --- a/frontend/app/player/web/WebPlayer.ts +++ b/frontend/app/player/web/WebPlayer.ts @@ -86,7 +86,7 @@ export default class WebPlayer extends Player { // delayed message decoding for state plugins decodeMessage = (msg: Message) => { - return this.messageManager.decoder.decode(msg) + return this.messageManager.decodeMessage(msg) } // Inspector & marker From 478bf3c69cef40e2d85dea0fe3e8a70cbf4559f9 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 6 Mar 2023 17:05:05 +0100 Subject: [PATCH 193/218] chore(init): Overwrite existing directory cho Signed-off-by: rjshrjndrn --- scripts/helmcharts/init.sh | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/init.sh b/scripts/helmcharts/init.sh index 69a6944b2..525a00a82 100644 --- a/scripts/helmcharts/init.sh +++ b/scripts/helmcharts/init.sh @@ -175,8 +175,14 @@ function main() { install_openreplay sudo mkdir -p /var/lib/openreplay sudo cp -f openreplay-cli /bin/openreplay - sudo cp -rf ../../../openreplay /var/lib/openreplay - sudo cp -f vars.yaml /var/lib/openreplay + [[ ! -d /var/lib/openreplay/openreplay ]] || { + cd /var/lib/openreplay/openreplay + date +%m-%d-%Y-%H%M%S | sudo tee -a /var/lib/openreplay/or_versions.txt + sudo git log -1 2>&1 | sudo tee -a /var/lib/openreplay/or_versions.txt + sudo rm -rf /var/lib/openreplay/openreplay + cd - + } + sudo cp -rf $(cd ../.. 
&& pwd) /var/lib/openreplay/openreplay } } From 9979399fc8234b01914d904b12f031cca71581b6 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Mon, 6 Mar 2023 17:06:33 +0100 Subject: [PATCH 194/218] chore(init): Overwrite existing directory (#1017) cho Signed-off-by: rjshrjndrn --- scripts/helmcharts/init.sh | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/init.sh b/scripts/helmcharts/init.sh index 69a6944b2..525a00a82 100644 --- a/scripts/helmcharts/init.sh +++ b/scripts/helmcharts/init.sh @@ -175,8 +175,14 @@ function main() { install_openreplay sudo mkdir -p /var/lib/openreplay sudo cp -f openreplay-cli /bin/openreplay - sudo cp -rf ../../../openreplay /var/lib/openreplay - sudo cp -f vars.yaml /var/lib/openreplay + [[ ! -d /var/lib/openreplay/openreplay ]] || { + cd /var/lib/openreplay/openreplay + date +%m-%d-%Y-%H%M%S | sudo tee -a /var/lib/openreplay/or_versions.txt + sudo git log -1 2>&1 | sudo tee -a /var/lib/openreplay/or_versions.txt + sudo rm -rf /var/lib/openreplay/openreplay + cd - + } + sudo cp -rf $(cd ../.. && pwd) /var/lib/openreplay/openreplay } } From 40f1eed94a7ee0a0d35fd13cfa4805c1f197f80e Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 6 Mar 2023 18:10:15 +0100 Subject: [PATCH 195/218] change(ui) - tracking options - data recording dropdown option order --- frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx | 2 +- .../TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx b/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx index c9629bf7a..116392534 100644 --- a/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx +++ b/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx @@ -4,8 +4,8 @@ import Highlight from 'react-highlight'; const inputModeOptions = [ { label: 'Record all inputs', value: 'plain' }, - { label: 'Ignore all inputs', value: 'obscured' }, { label: 'Obscure all inputs', value: 'hidden' }, + { label: 'Ignore all inputs', value: 'obscured' }, ]; const inputModeOptionsMap: any = {} diff --git a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js index b8bd87b6a..4b51fc963 100644 --- a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js +++ b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js @@ -9,8 +9,8 @@ import CodeSnippet from '../../CodeSnippet'; const inputModeOptions = [ { label: 'Record all inputs', value: 'plain' }, - { label: 'Ignore all inputs', value: 'obscured' }, { label: 'Obscure all inputs', value: 'hidden' }, + { label: 'Ignore all inputs', value: 'obscured' }, ]; const inputModeOptionsMap = {} From 8d6afb2586cbcf5776c99c48bfeef3465219cc8e Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 7 Mar 2023 06:32:30 +0100 Subject: [PATCH 196/218] chore(helm): Clean cron every 2 days once Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/charts/utilities/values.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/utilities/values.yaml b/scripts/helmcharts/openreplay/charts/utilities/values.yaml index 97ee29798..a8a2fddaa 100644 --- a/scripts/helmcharts/openreplay/charts/utilities/values.yaml +++ b/scripts/helmcharts/openreplay/charts/utilities/values.yaml @@ -80,8 
+80,8 @@ nameOverride: "utilities" fullnameOverride: "utilities-openreplay" # 5 3 * * 1 “At 03:05 on Monday.” -# refer: https://crontab.guru/#5_3_*_*_1 -cron: "5 3 * * 1" +# refer: https://crontab.guru/#5_3_*_*_*/2 +cron: "5 3 * * */2" # Pod configurations From 9a0ba57d491ae7d37a26724932f0da001d3bda73 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 7 Mar 2023 09:09:12 +0100 Subject: [PATCH 197/218] fix(ui) - events tab checking for payload --- frontend/app/types/session/stackEvent.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/types/session/stackEvent.ts b/frontend/app/types/session/stackEvent.ts index 8ce375fc2..8bbea2778 100644 --- a/frontend/app/types/session/stackEvent.ts +++ b/frontend/app/types/session/stackEvent.ts @@ -58,7 +58,7 @@ export default class StackEvent { level: IStackEvent["level"]; constructor(evt: IStackEvent) { - const event = { ...evt, source: evt.source || OPENREPLAY } + const event = { ...evt, source: evt.source || OPENREPLAY, payload: evt.payload || {} }; Object.assign(this, { ...event, isRed: isRed(event), From 45a585d110ed05b76c07a45442e73e8501d60b62 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 7 Mar 2023 09:09:12 +0100 Subject: [PATCH 198/218] fix(ui) - events tab checking for payload --- frontend/app/types/session/stackEvent.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/types/session/stackEvent.ts b/frontend/app/types/session/stackEvent.ts index 8ce375fc2..8bbea2778 100644 --- a/frontend/app/types/session/stackEvent.ts +++ b/frontend/app/types/session/stackEvent.ts @@ -58,7 +58,7 @@ export default class StackEvent { level: IStackEvent["level"]; constructor(evt: IStackEvent) { - const event = { ...evt, source: evt.source || OPENREPLAY } + const event = { ...evt, source: evt.source || OPENREPLAY, payload: evt.payload || {} }; Object.assign(this, { ...event, isRed: isRed(event), From cbd8f34ec1628beb882316f1dbab341e09fc16b9 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 7 Mar 2023 10:03:56 +0100 Subject: [PATCH 199/218] change(tracker): change default input mode to obscured --- tracker/tracker/CHANGELOG.md | 4 ++++ tracker/tracker/src/main/modules/input.ts | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index 559e4e865..329f3aa59 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -1,3 +1,7 @@ +## 5.0.1 + +- Default text input mode is now Obscured + ## 5.0.0 - Added "tel" to supported input types diff --git a/tracker/tracker/src/main/modules/input.ts b/tracker/tracker/src/main/modules/input.ts index 15acecaa9..e2e93bff7 100644 --- a/tracker/tracker/src/main/modules/input.ts +++ b/tracker/tracker/src/main/modules/input.ts @@ -89,7 +89,7 @@ export default function (app: App, opts: Partial): void { { obscureInputNumbers: true, obscureInputEmails: true, - defaultInputMode: InputMode.Plain, + defaultInputMode: InputMode.Obscured, obscureInputDates: false, }, opts, From a13363d97386811c7bb6f5ce3646520b6644cda2 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 7 Mar 2023 12:15:58 +0100 Subject: [PATCH 200/218] change(player): sort msgs by timestamp --- frontend/app/player/web/MessageManager.ts | 28 +++++++++++++++-------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 0d9aaee3d..e41bf04d5 100644 --- a/frontend/app/player/web/MessageManager.ts +++ 
b/frontend/app/player/web/MessageManager.ts @@ -196,7 +196,7 @@ export default class MessageManager { async loadMessages(isClickmap: boolean = false) { this.setMessagesLoading(true) // TODO: reusable decryptor instance - const createNewParser = (shouldDecrypt = true) => { + const createNewParser = (shouldDecrypt = true, file) => { const decrypt = shouldDecrypt && this.session.fileKey ? (b: Uint8Array) => decryptSessionBytes(b, this.session.fileKey) : (b: Uint8Array) => Promise.resolve(b) @@ -206,11 +206,21 @@ export default class MessageManager { fileReader.append(b) const msgs: Array = [] for (let msg = fileReader.readNext();msg !== null;msg = fileReader.readNext()) { - this.distributeMessage(msg, msg._index) msgs.push(msg) } + const sorted = msgs.sort((m1, m2) => m1.time - m2.time) + + let indx = sorted[0]._index + let counter = 0 + sorted.forEach(msg => { + if (indx > msg._index) counter++ + else indx = msg._index + this.distributeMessage(msg, msg._index) + }) + + if (counter > 0) console.warn("Unsorted mob file, error count: ", counter) + logger.info("Messages count: ", msgs.length, sorted, file) - logger.info("Messages count: ", msgs.length, msgs) this._sortMessagesHack(msgs) this.setMessagesLoading(false) }) @@ -219,8 +229,8 @@ export default class MessageManager { this.waitingForFiles = true const loadMethod = this.session.domURL && this.session.domURL.length > 0 - ? { url: this.session.domURL, parser: createNewParser } - : { url: this.session.mobsUrl, parser: () => createNewParser(false)} + ? { url: this.session.domURL, parser: () => createNewParser(true, 'dom') } + : { url: this.session.mobsUrl, parser: () => createNewParser(false, 'dom')} loadFiles(loadMethod.url, loadMethod.parser()) // EFS fallback @@ -235,11 +245,11 @@ export default class MessageManager { // load devtools (TODO: start after the first DOM file download) if (isClickmap) return; this.state.update({ devtoolsLoading: true }) - loadFiles(this.session.devtoolsURL, createNewParser()) + loadFiles(this.session.devtoolsURL, createNewParser(true, 'devtools')) // EFS fallback .catch(() => requestEFSDevtools(this.session.sessionId) - .then(createNewParser(false)) + .then(createNewParser(false, 'devtools')) ) .then(() => { this.state.update(this.lists.getFullListsState()) // TODO: also in case of dynamic update through assist @@ -406,7 +416,7 @@ export default class MessageManager { this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: - logger.log('redux', msg) + // logger.log('redux', msg) this.lists.lists.redux.append(msg); break; case MType.NgRx: @@ -414,7 +424,7 @@ export default class MessageManager { this.lists.lists.ngrx.append(msg); break; case MType.Vuex: - logger.log('vuex', msg) + // logger.log('vuex', msg) this.lists.lists.vuex.append(msg); break; case MType.Zustand: From 6623e902e93ec034387d688adcbc7705b17685cd Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 7 Mar 2023 12:18:43 +0100 Subject: [PATCH 201/218] change(player): small fix --- frontend/app/player/web/MessageManager.ts | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index e41bf04d5..c54b86fb8 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -211,14 +211,14 @@ export default class MessageManager { const sorted = msgs.sort((m1, m2) => m1.time - m2.time) let indx = sorted[0]._index - let counter = 0 + let 
outOfOrderCounter = 0 sorted.forEach(msg => { - if (indx > msg._index) counter++ + if (indx > msg._index) outOfOrderCounter++ else indx = msg._index this.distributeMessage(msg, msg._index) }) - if (counter > 0) console.warn("Unsorted mob file, error count: ", counter) + if (outOfOrderCounter > 0) console.warn("Unsorted mob file, error count: ", outOfOrderCounter) logger.info("Messages count: ", msgs.length, sorted, file) this._sortMessagesHack(msgs) @@ -236,7 +236,7 @@ export default class MessageManager { // EFS fallback .catch((e) => requestEFSDom(this.session.sessionId) - .then(createNewParser(false)) + .then(createNewParser(false, 'domEFS')) ) .then(this.onFileReadSuccess) .catch(this.onFileReadFailed) @@ -249,7 +249,7 @@ export default class MessageManager { // EFS fallback .catch(() => requestEFSDevtools(this.session.sessionId) - .then(createNewParser(false, 'devtools')) + .then(createNewParser(false, 'devtoolsEFS')) ) .then(() => { this.state.update(this.lists.getFullListsState()) // TODO: also in case of dynamic update through assist @@ -416,23 +416,18 @@ export default class MessageManager { this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: - // logger.log('redux', msg) this.lists.lists.redux.append(msg); break; case MType.NgRx: - logger.log('ngrx', msg) this.lists.lists.ngrx.append(msg); break; case MType.Vuex: - // logger.log('vuex', msg) this.lists.lists.vuex.append(msg); break; case MType.Zustand: - logger.log('zustand', msg) this.lists.lists.zustand.append(msg) break case MType.MobX: - logger.log('mobx', msg) this.lists.lists.mobx.append(msg); break; case MType.GraphQl: From 67e8ad209e5281b73d2ca945c02fcaae75736070 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 7 Mar 2023 12:15:58 +0100 Subject: [PATCH 202/218] fix(ui) - player --- frontend/app/player/web/MessageManager.ts | 38 +++++++++++++---------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 330d1c861..2752d8bab 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -196,7 +196,7 @@ export default class MessageManager { async loadMessages(isClickmap: boolean = false) { this.setMessagesLoading(true) // TODO: reusable decryptor instance - const createNewParser = (shouldDecrypt = true) => { + const createNewParser = (shouldDecrypt = true, file) => { const decrypt = shouldDecrypt && this.session.fileKey ? (b: Uint8Array) => decryptSessionBytes(b, this.session.fileKey) : (b: Uint8Array) => Promise.resolve(b) @@ -206,11 +206,21 @@ export default class MessageManager { fileReader.append(b) const msgs: Array = [] for (let msg = fileReader.readNext();msg !== null;msg = fileReader.readNext()) { - this.distributeMessage(msg, msg._index) msgs.push(msg) } + const sorted = msgs.sort((m1, m2) => m1.time - m2.time) + + let indx = sorted[0]._index + let counter = 0 + sorted.forEach(msg => { + if (indx > msg._index) counter++ + else indx = msg._index + this.distributeMessage(msg, msg._index) + }) + + if (counter > 0) console.warn("Unsorted mob file, error count: ", counter) + logger.info("Messages count: ", msgs.length, sorted, file) - logger.info("Messages count: ", msgs.length, msgs) this._sortMessagesHack(msgs) this.setMessagesLoading(false) }) @@ -219,8 +229,8 @@ export default class MessageManager { this.waitingForFiles = true const loadMethod = this.session.domURL && this.session.domURL.length > 0 - ? 
{ url: this.session.domURL, parser: createNewParser } - : { url: this.session.mobsUrl, parser: () => createNewParser(false)} + ? { url: this.session.domURL, parser: () => createNewParser(true, 'dom') } + : { url: this.session.mobsUrl, parser: () => createNewParser(false, 'dom')} loadFiles(loadMethod.url, loadMethod.parser()) // EFS fallback @@ -235,11 +245,11 @@ export default class MessageManager { // load devtools (TODO: start after the first DOM file download) if (isClickmap) return; this.state.update({ devtoolsLoading: true }) - loadFiles(this.session.devtoolsURL, createNewParser()) + loadFiles(this.session.devtoolsURL, createNewParser(true, 'devtools')) // EFS fallback .catch(() => requestEFSDevtools(this.session.sessionId) - .then(createNewParser(false)) + .then(createNewParser(false, 'devtools')) ) .then(() => { this.state.update(this.lists.getFullListsState()) // TODO: also in case of dynamic update through assist @@ -406,11 +416,8 @@ export default class MessageManager { this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: - decoded = this.decodeStateMessage(msg, ["state", "action"]); - logger.log('redux', decoded) - if (decoded != null) { - this.lists.lists.redux.append(decoded); - } + // logger.log('redux', msg) + this.lists.lists.redux.append(msg); break; case MType.NgRx: decoded = this.decodeStateMessage(msg, ["state", "action"]); @@ -420,11 +427,8 @@ export default class MessageManager { } break; case MType.Vuex: - decoded = this.decodeStateMessage(msg, ["state", "mutation"]); - logger.log('vuex', decoded) - if (decoded != null) { - this.lists.lists.vuex.append(decoded); - } + // logger.log('vuex', msg) + this.lists.lists.vuex.append(msg); break; case MType.Zustand: decoded = this.decodeStateMessage(msg, ["state", "mutation"]) From 3185eace860b8f9e8701c206045450c405ac1620 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 7 Mar 2023 12:18:43 +0100 Subject: [PATCH 203/218] fix(ui) - player --- frontend/app/player/web/MessageManager.ts | 38 +++++++++-------------- 1 file changed, 14 insertions(+), 24 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 2752d8bab..c54b86fb8 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -107,7 +107,7 @@ export default class MessageManager { private scrollManager: ListWalker = new ListWalker(); - private readonly decoder = new Decoder(); + public readonly decoder = new Decoder(); private readonly lists: Lists; private activityManager: ActivityManager | null = null; @@ -211,14 +211,14 @@ export default class MessageManager { const sorted = msgs.sort((m1, m2) => m1.time - m2.time) let indx = sorted[0]._index - let counter = 0 + let outOfOrderCounter = 0 sorted.forEach(msg => { - if (indx > msg._index) counter++ + if (indx > msg._index) outOfOrderCounter++ else indx = msg._index this.distributeMessage(msg, msg._index) }) - if (counter > 0) console.warn("Unsorted mob file, error count: ", counter) + if (outOfOrderCounter > 0) console.warn("Unsorted mob file, error count: ", outOfOrderCounter) logger.info("Messages count: ", msgs.length, sorted, file) this._sortMessagesHack(msgs) @@ -236,7 +236,7 @@ export default class MessageManager { // EFS fallback .catch((e) => requestEFSDom(this.session.sessionId) - .then(createNewParser(false)) + .then(createNewParser(false, 'domEFS')) ) .then(this.onFileReadSuccess) .catch(this.onFileReadFailed) @@ -249,7 +249,7 @@ export 
default class MessageManager { // EFS fallback .catch(() => requestEFSDevtools(this.session.sessionId) - .then(createNewParser(false, 'devtools')) + .then(createNewParser(false, 'devtoolsEFS')) ) .then(() => { this.state.update(this.lists.getFullListsState()) // TODO: also in case of dynamic update through assist @@ -416,33 +416,19 @@ export default class MessageManager { this.lists.lists.fetch.insert(getResourceFromNetworkRequest(msg, this.sessionStart)) break; case MType.Redux: - // logger.log('redux', msg) this.lists.lists.redux.append(msg); break; case MType.NgRx: - decoded = this.decodeStateMessage(msg, ["state", "action"]); - logger.log('ngrx', decoded) - if (decoded != null) { - this.lists.lists.ngrx.append(decoded); - } + this.lists.lists.ngrx.append(msg); break; case MType.Vuex: - // logger.log('vuex', msg) this.lists.lists.vuex.append(msg); break; case MType.Zustand: - decoded = this.decodeStateMessage(msg, ["state", "mutation"]) - logger.log('zustand', decoded) - if (decoded != null) { - this.lists.lists.zustand.append(decoded) - } + this.lists.lists.zustand.append(msg) + break case MType.MobX: - decoded = this.decodeStateMessage(msg, ["payload"]); - logger.log('mobx', decoded) - - if (decoded != null) { - this.lists.lists.mobx.append(decoded); - } + this.lists.lists.mobx.append(msg); break; case MType.GraphQl: this.lists.lists.graphql.append(msg); @@ -482,6 +468,10 @@ export default class MessageManager { this.state.update({ messagesLoading, ready: !messagesLoading && !this.state.get().cssLoading }); } + decodeMessage(msg: Message) { + return this.decoder.decode(msg) + } + private setSize({ height, width }: { height: number, width: number }) { this.screen.scale({ height, width }); this.state.update({ width, height }); From 7a1979dfd2db6f413e8c4604a1be15c60afd948c Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 12:49:32 +0100 Subject: [PATCH 204/218] fix(player): fix clickmap session size? 
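The change below keeps the AdjustParentHeight scale mode from depending on the replayed document's layout: the stage is scaled by width and sized to the recorded viewport height. A minimal sketch of the resulting style computation (variable names taken from the diff; an illustration, not the full Screen class):

    // Scale by width and pin the stage to the recorded viewport height.
    // Measuring document.documentElement.getBoundingClientRect() instead can
    // report a stale height while the replayed document is still rendering.
    function adjustParentHeightStyles(offsetWidth: number, width: number, height: number) {
      const scaleRatio = offsetWidth / width
      return {
        transform: `scale(${scaleRatio}) translate(-50%, 0)`,
        top: '0px',
        height: `${height}px`,
      }
    }
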
--- .../app/components/Session/Player/ClickMapRenderer/Renderer.tsx | 1 - frontend/app/player/web/Screen/Screen.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx b/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx index 8a51717e5..29da5800c 100644 --- a/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx +++ b/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx @@ -16,7 +16,6 @@ function Player() { } }, []); - if (!playerContext.player) return null; return ( diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts index cca56d402..377f704ed 100644 --- a/frontend/app/player/web/Screen/Screen.ts +++ b/frontend/app/player/web/Screen/Screen.ts @@ -218,7 +218,7 @@ export default class Screen { case ScaleMode.AdjustParentHeight: this.scaleRatio = offsetWidth / width translate = "translate(-50%, 0)" - posStyles = { top: 0, height: this.document!.documentElement.getBoundingClientRect().height + 'px', } + posStyles = { top: 0, height: height + 'px', } break; } From c3a4a6012db7995a18f682a17c1ebf67cb11c0ad Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 15:30:22 +0100 Subject: [PATCH 205/218] fix(player): fix clickmap url filtering, fix clickmap highliter --- .../CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx | 8 +++----- frontend/app/player/web/addons/TargetMarker.ts | 2 +- frontend/app/player/web/addons/clickmapStyles.ts | 6 +++--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx index e8ff709c9..c77dbcd95 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx @@ -20,6 +20,7 @@ function ClickMapCard({ const onMarkerClick = (s: string, innerText: string) => { metricStore.changeClickMapSearch(s, innerText) } + const mapUrl = metricStore.instance.series[0].filter.filters[0].value[0] React.useEffect(() => { return () => clearCurrentSession() @@ -32,12 +33,10 @@ function ClickMapCard({ React.useEffect(() => { if (visitedEvents.length) { - const urlOptions = visitedEvents.map(({ url, host }: any) => ({ label: url, value: url, host })) - const url = insightsFilters.url ? insightsFilters.url : host + urlOptions[0].value; const rangeValue = dashboardStore.drillDownPeriod.rangeValue const startDate = dashboardStore.drillDownPeriod.start const endDate = dashboardStore.drillDownPeriod.end - fetchInsights({ ...insightsFilters, url, startDate, endDate, rangeValue, clickRage: metricStore.clickMapFilter }) + fetchInsights({ ...insightsFilters, url: mapUrl || '/', startDate, endDate, rangeValue, clickRage: metricStore.clickMapFilter }) } }, [visitedEvents, metricStore.clickMapFilter]) @@ -62,9 +61,8 @@ function ClickMapCard({ return
Loading session
} - const searchUrl = metricStore.instance.series[0].filter.filters[0].value[0] const jumpToEvent = metricStore.instance.data.events.find((evt: Record) => { - if (searchUrl) return evt.path.includes(searchUrl) + if (mapUrl) return evt.path.includes(mapUrl) return evt }) || { timestamp: metricStore.instance.data.startTs } diff --git a/frontend/app/player/web/addons/TargetMarker.ts b/frontend/app/player/web/addons/TargetMarker.ts index 6629ceaec..452ddd00f 100644 --- a/frontend/app/player/web/addons/TargetMarker.ts +++ b/frontend/app/player/web/addons/TargetMarker.ts @@ -240,7 +240,7 @@ export default class TargetMarker { }) } - Object.assign(smallClicksBubble.style, clickmapStyles.clicks({ top, height, isRage: s.clickRage })) + Object.assign(smallClicksBubble.style, clickmapStyles.clicks({ top, height, isRage: s.clickRage, left })) border.appendChild(smallClicksBubble) overlay.appendChild(bubbleContainer) diff --git a/frontend/app/player/web/addons/clickmapStyles.ts b/frontend/app/player/web/addons/clickmapStyles.ts index 0ab795ea0..f0dc65a9c 100644 --- a/frontend/app/player/web/addons/clickmapStyles.ts +++ b/frontend/app/player/web/addons/clickmapStyles.ts @@ -16,7 +16,7 @@ export const clickmapStyles = { }, bubbleContainer: ({ top, left, height }: { top: number; left: number, height: number }) => ({ position: 'absolute', - top: top > 20 ? top + 'px' : height + 2 + 'px', + top: top > 75 ? top + 'px' : height+75 + 'px', width: '250px', left: `${left}px`, padding: '10px', @@ -51,9 +51,9 @@ export const clickmapStyles = { position: 'absolute', zIndex, }), - clicks: ({ top, height, isRage }: { top: number; height: number, isRage?: boolean }) => ({ + clicks: ({ top, height, isRage, left }: { top: number; height: number, isRage?: boolean, left: number }) => ({ top: top > 20 ? 0 : `${height}px`, - left: 0, + left: left < 5 ? '100%' : 0, position: 'absolute', borderRadius: '999px', padding: '6px', From 986b5a8802ec953f9035b16e87b47425bb6249f9 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 16:13:56 +0100 Subject: [PATCH 206/218] fix(player): fix clickmap selectors --- frontend/app/player/web/Screen/Screen.ts | 3 +-- tracker/tracker/CHANGELOG.md | 1 + tracker/tracker/package.json | 3 ++- tracker/tracker/src/main/modules/mouse.ts | 27 ++++++++--------------- 4 files changed, 13 insertions(+), 21 deletions(-) diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts index 377f704ed..f27a251f1 100644 --- a/frontend/app/player/web/Screen/Screen.ts +++ b/frontend/app/player/web/Screen/Screen.ts @@ -3,7 +3,6 @@ import Cursor from './Cursor' import type { Point, Dimensions } from './types'; - export type State = Dimensions export const INITIAL_STATE: State = { @@ -182,7 +181,7 @@ export default class Screen { getElementBySelector(selector: string) { if (!selector) return null; try { - const safeSelector = selector.replace(/:/g, '\\\\3A ').replace(/\//g, '\\/'); + const safeSelector = selector.replace(/\//g, '\\/'); return this.document?.querySelector(safeSelector) || null; } catch (e) { console.error("Can not select element. 
", e) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index 329f3aa59..6a8e25690 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -1,6 +1,7 @@ ## 5.0.1 - Default text input mode is now Obscured +- Use `@medv/finder` instead of our own implementation of `getSelector` for better clickmaps experience ## 5.0.0 diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index c45c15e4a..326b45575 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "5.0.0", + "version": "5.0.1-beta.1", "keywords": [ "logging", "replay" @@ -47,6 +47,7 @@ "typescript": "^4.9.4" }, "dependencies": { + "@medv/finder": "^3.0.0", "error-stack-parser": "^2.0.6" }, "engines": { diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index b00d6d304..2b32a99d7 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -3,26 +3,17 @@ import { hasTag, isSVGElement, isDocument } from '../app/guards.js' import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils.js' import { MouseMove, MouseClick } from '../app/messages.gen.js' import { getInputLabel } from './input.js' +import { finder } from '@medv/finder' function _getSelector(target: Element, document: Document): string { - let el: Element | null = target - let selector: string | null = null - do { - if (el.id) { - return `#${el.id}` + (selector ? ` > ${selector}` : '') - } - selector = - el.className - .split(' ') - .map((cn) => cn.trim()) - .filter((cn) => cn !== '') - .reduce((sel, cn) => `${sel}.${cn}`, el.tagName.toLowerCase()) + - (selector ? 
` > ${selector}` : '') - if (el === document.body) { - return selector - } - el = el.parentElement - } while (el !== document.body && el !== null) + const selector = finder(target, { + root: document.body, + seedMinLength: 3, + optimizedMinLength: 2, + threshold: 1000, + maxNumberOfTries: 10_000, + }) + return selector } From 0d857822e80f49b9fb92a947dc1fffca113351c3 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 8 Mar 2023 16:40:39 +0100 Subject: [PATCH 207/218] chore(helm): disable service monitor for some services --- scripts/helmcharts/openreplay/charts/alerts/values.yaml | 2 +- scripts/helmcharts/openreplay/charts/chalice/values.yaml | 2 +- scripts/helmcharts/openreplay/charts/peers/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/helmcharts/openreplay/charts/alerts/values.yaml b/scripts/helmcharts/openreplay/charts/alerts/values.yaml index a54418a9f..ca76602c1 100644 --- a/scripts/helmcharts/openreplay/charts/alerts/values.yaml +++ b/scripts/helmcharts/openreplay/charts/alerts/values.yaml @@ -51,7 +51,7 @@ service: metrics: 8888 serviceMonitor: - enabled: true + enabled: false additionalLabels: release: observability scrapeConfigs: diff --git a/scripts/helmcharts/openreplay/charts/chalice/values.yaml b/scripts/helmcharts/openreplay/charts/chalice/values.yaml index 3269aa503..1a1d496ed 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/values.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/values.yaml @@ -51,7 +51,7 @@ service: metrics: 8888 serviceMonitor: - enabled: true + enabled: false additionalLabels: release: observability scrapeConfigs: diff --git a/scripts/helmcharts/openreplay/charts/peers/values.yaml b/scripts/helmcharts/openreplay/charts/peers/values.yaml index 57fc30bde..0bc4b6b14 100644 --- a/scripts/helmcharts/openreplay/charts/peers/values.yaml +++ b/scripts/helmcharts/openreplay/charts/peers/values.yaml @@ -49,7 +49,7 @@ podSecurityContext: # port: 9000 serviceMonitor: - enabled: true + enabled: false additionalLabels: release: observability scrapeConfigs: From a1b3eb57ec34c317ac68d1da585f6605eecca133 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 16:41:49 +0100 Subject: [PATCH 208/218] fix(player): track tr th clicks for map --- third-party.md | 2 +- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/modules/mouse.ts | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/third-party.md b/third-party.md index e0b68d9f6..0cfe2cac2 100644 --- a/third-party.md +++ b/third-party.md @@ -115,4 +115,4 @@ Below is the list of dependencies used in OpenReplay software. 
Licenses may chan | yq | MIT | Infrastructure | | html2canvas | MIT | JavaScript | | eget | MIT | Infrastructure | - +| @medv/finder | MIT | JavaScript | diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 326b45575..a67073cc9 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "5.0.1-beta.1", + "version": "5.0.1-beta.2", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index 2b32a99d7..155a14a8d 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -24,6 +24,8 @@ function isClickable(element: Element): boolean { tag === 'A' || tag === 'LI' || tag === 'SELECT' || + tag === 'TR' || + tag === 'TH' || (element as HTMLElement).onclick != null || element.getAttribute('role') === 'button' ) From d047570eb936c29f0820ded4177bd8ac94f6a1ea Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 12:49:32 +0100 Subject: [PATCH 209/218] fix(player): fix clickmap session size? --- .../app/components/Session/Player/ClickMapRenderer/Renderer.tsx | 1 - frontend/app/player/web/Screen/Screen.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx b/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx index 8a51717e5..29da5800c 100644 --- a/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx +++ b/frontend/app/components/Session/Player/ClickMapRenderer/Renderer.tsx @@ -16,7 +16,6 @@ function Player() { } }, []); - if (!playerContext.player) return null; return ( diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts index cca56d402..377f704ed 100644 --- a/frontend/app/player/web/Screen/Screen.ts +++ b/frontend/app/player/web/Screen/Screen.ts @@ -218,7 +218,7 @@ export default class Screen { case ScaleMode.AdjustParentHeight: this.scaleRatio = offsetWidth / width translate = "translate(-50%, 0)" - posStyles = { top: 0, height: this.document!.documentElement.getBoundingClientRect().height + 'px', } + posStyles = { top: 0, height: height + 'px', } break; } From 022928d082ae0233189b1e164380df40f6f0cf72 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 15:30:22 +0100 Subject: [PATCH 210/218] fix(player): fix clickmap url filtering, fix clickmap highliter --- .../CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx | 8 +++----- frontend/app/player/web/addons/TargetMarker.ts | 2 +- frontend/app/player/web/addons/clickmapStyles.ts | 6 +++--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx index e8ff709c9..c77dbcd95 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/ClickMapCard/ClickMapCard.tsx @@ -20,6 +20,7 @@ function ClickMapCard({ const onMarkerClick = (s: string, innerText: string) => { metricStore.changeClickMapSearch(s, innerText) } + const mapUrl = metricStore.instance.series[0].filter.filters[0].value[0] React.useEffect(() => { return () => clearCurrentSession() @@ -32,12 +33,10 @@ function ClickMapCard({ 
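      // mapUrl (added above) comes from the card's first series filter and is
      // used by both the fetchInsights call below and the jumpToEvent lookup,
      // replacing the url previously derived from visitedEvents/urlOptions.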
   React.useEffect(() => {
     if (visitedEvents.length) {
-      const urlOptions = visitedEvents.map(({ url, host }: any) => ({ label: url, value: url, host }))
-      const url = insightsFilters.url ? insightsFilters.url : host + urlOptions[0].value;
       const rangeValue = dashboardStore.drillDownPeriod.rangeValue
       const startDate = dashboardStore.drillDownPeriod.start
       const endDate = dashboardStore.drillDownPeriod.end
-      fetchInsights({ ...insightsFilters, url, startDate, endDate, rangeValue, clickRage: metricStore.clickMapFilter })
+      fetchInsights({ ...insightsFilters, url: mapUrl || '/', startDate, endDate, rangeValue, clickRage: metricStore.clickMapFilter })
     }
   }, [visitedEvents, metricStore.clickMapFilter])

@@ -62,9 +61,8 @@ function ClickMapCard({
     return
       Loading session
   }

-  const searchUrl = metricStore.instance.series[0].filter.filters[0].value[0]
   const jumpToEvent = metricStore.instance.data.events.find((evt: Record) => {
-    if (searchUrl) return evt.path.includes(searchUrl)
+    if (mapUrl) return evt.path.includes(mapUrl)
     return evt
   }) || { timestamp: metricStore.instance.data.startTs }

diff --git a/frontend/app/player/web/addons/TargetMarker.ts b/frontend/app/player/web/addons/TargetMarker.ts
index 6629ceaec..452ddd00f 100644
--- a/frontend/app/player/web/addons/TargetMarker.ts
+++ b/frontend/app/player/web/addons/TargetMarker.ts
@@ -240,7 +240,7 @@ export default class TargetMarker {
       })
     }

-    Object.assign(smallClicksBubble.style, clickmapStyles.clicks({ top, height, isRage: s.clickRage }))
+    Object.assign(smallClicksBubble.style, clickmapStyles.clicks({ top, height, isRage: s.clickRage, left }))
     border.appendChild(smallClicksBubble)
     overlay.appendChild(bubbleContainer)

diff --git a/frontend/app/player/web/addons/clickmapStyles.ts b/frontend/app/player/web/addons/clickmapStyles.ts
index 0ab795ea0..f0dc65a9c 100644
--- a/frontend/app/player/web/addons/clickmapStyles.ts
+++ b/frontend/app/player/web/addons/clickmapStyles.ts
@@ -16,7 +16,7 @@ export const clickmapStyles = {
   }),
   bubbleContainer: ({ top, left, height }: { top: number; left: number, height: number }) => ({
     position: 'absolute',
-    top: top > 20 ? top + 'px' : height + 2 + 'px',
+    top: top > 75 ? top + 'px' : height+75 + 'px',
     width: '250px',
     left: `${left}px`,
     padding: '10px',
@@ -51,9 +51,9 @@
     position: 'absolute',
     zIndex,
   }),
-  clicks: ({ top, height, isRage }: { top: number; height: number, isRage?: boolean }) => ({
+  clicks: ({ top, height, isRage, left }: { top: number; height: number, isRage?: boolean, left: number }) => ({
     top: top > 20 ? 0 : `${height}px`,
-    left: 0,
+    left: left < 5 ? '100%' : 0,
     position: 'absolute',
     borderRadius: '999px',
     padding: '6px',

From 604b095b63c55d65ba73f205d0e0919fb9c56327 Mon Sep 17 00:00:00 2001
From: nick-delirium
Date: Wed, 8 Mar 2023 16:13:56 +0100
Subject: [PATCH 211/218] 986b5a8802ec953f9035b16e87b47425bb6249f9 resolved
 conflicts

---
 frontend/app/player/web/Screen/Screen.ts  |  3 +--
 tracker/tracker/CHANGELOG.md              |  5 ++++-
 tracker/tracker/package.json              |  3 ++-
 tracker/tracker/src/main/modules/mouse.ts | 27 ++++++++---------------
 4 files changed, 16 insertions(+), 22 deletions(-)

diff --git a/frontend/app/player/web/Screen/Screen.ts b/frontend/app/player/web/Screen/Screen.ts
index 377f704ed..f27a251f1 100644
--- a/frontend/app/player/web/Screen/Screen.ts
+++ b/frontend/app/player/web/Screen/Screen.ts
@@ -3,7 +3,6 @@ import Cursor from './Cursor'

 import type { Point, Dimensions } from './types';

-
 export type State = Dimensions

 export const INITIAL_STATE: State = {
@@ -182,7 +181,7 @@ export default class Screen {
   getElementBySelector(selector: string) {
     if (!selector) return null;
     try {
-      const safeSelector = selector.replace(/:/g, '\\\\3A ').replace(/\//g, '\\/');
+      const safeSelector = selector.replace(/\//g, '\\/');
       return this.document?.querySelector(safeSelector) || null;
     } catch (e) {
       console.error("Can not select element. ", e)
diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md
index 559e4e865..e3a4c3d78 100644
--- a/tracker/tracker/CHANGELOG.md
+++ b/tracker/tracker/CHANGELOG.md
@@ -1,4 +1,7 @@
-## 5.0.0
+## 5.0.1
+
+- Default text input mode is now Obscured
+- Use `@medv/finder` instead of our own implementation of `getSelector` for better clickmaps experience

 - Added "tel" to supported input types
 - Added `{ withCurrentTime: true }` to `tracker.getSessionURL` method which will return sessionURL with current session's timestamp
diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json
index c45c15e4a..326b45575 100644
--- a/tracker/tracker/package.json
+++ b/tracker/tracker/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker",
   "description": "The OpenReplay tracker main package",
-  "version": "5.0.0",
+  "version": "5.0.1-beta.1",
   "keywords": [
     "logging",
     "replay"
   ],
@@ -47,6 +47,7 @@
     "typescript": "^4.9.4"
   },
   "dependencies": {
+    "@medv/finder": "^3.0.0",
     "error-stack-parser": "^2.0.6"
   },
   "engines": {
diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts
index b00d6d304..2b32a99d7 100644
--- a/tracker/tracker/src/main/modules/mouse.ts
+++ b/tracker/tracker/src/main/modules/mouse.ts
@@ -3,26 +3,17 @@ import { hasTag, isSVGElement, isDocument } from '../app/guards.js'
 import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils.js'
 import { MouseMove, MouseClick } from '../app/messages.gen.js'
 import { getInputLabel } from './input.js'
+import { finder } from '@medv/finder'

 function _getSelector(target: Element, document: Document): string {
-  let el: Element | null = target
-  let selector: string | null = null
-  do {
-    if (el.id) {
-      return `#${el.id}` + (selector ? ` > ${selector}` : '')
-    }
-    selector =
-      el.className
-        .split(' ')
-        .map((cn) => cn.trim())
-        .filter((cn) => cn !== '')
-        .reduce((sel, cn) => `${sel}.${cn}`, el.tagName.toLowerCase()) +
-      (selector ? ` > ${selector}` : '')
-    if (el === document.body) {
-      return selector
-    }
-    el = el.parentElement
-  } while (el !== document.body && el !== null)
+  const selector = finder(target, {
+    root: document.body,
+    seedMinLength: 3,
+    optimizedMinLength: 2,
+    threshold: 1000,
+    maxNumberOfTries: 10_000,
+  })
+  return selector
 }

From 8c14743425dbb45b8089b37aae4bdbc3caf7b6a9 Mon Sep 17 00:00:00 2001
From: nick-delirium
Date: Wed, 8 Mar 2023 16:41:49 +0100
Subject: [PATCH 212/218] fix(player): track tr th clicks for map

---
 third-party.md                            | 2 +-
 tracker/tracker/package.json              | 2 +-
 tracker/tracker/src/main/modules/mouse.ts | 2 ++
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/third-party.md b/third-party.md
index e0b68d9f6..0cfe2cac2 100644
--- a/third-party.md
+++ b/third-party.md
@@ -115,4 +115,4 @@ Below is the list of dependencies used in OpenReplay software. Licenses may chan
 | yq | MIT | Infrastructure |
 | html2canvas | MIT | JavaScript |
 | eget | MIT | Infrastructure |
-
+| @medv/finder | MIT | JavaScript |
diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json
index 326b45575..a67073cc9 100644
--- a/tracker/tracker/package.json
+++ b/tracker/tracker/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker",
   "description": "The OpenReplay tracker main package",
-  "version": "5.0.1-beta.1",
+  "version": "5.0.1-beta.2",
   "keywords": [
     "logging",
     "replay"
   ],
diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts
index 2b32a99d7..155a14a8d 100644
--- a/tracker/tracker/src/main/modules/mouse.ts
+++ b/tracker/tracker/src/main/modules/mouse.ts
@@ -24,6 +24,8 @@ function isClickable(element: Element): boolean {
     tag === 'A' ||
     tag === 'LI' ||
     tag === 'SELECT' ||
+    tag === 'TR' ||
+    tag === 'TH' ||
     (element as HTMLElement).onclick != null ||
     element.getAttribute('role') === 'button'
   )
 }

From b9d5daf9ca329af2ed7416dedd9abacfff05a692 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 8 Mar 2023 17:06:43 +0100
Subject: [PATCH 213/218] feat(chalice): funnels fixed negative-event
 multi-value support

---
 api/chalicelib/core/significance.py        | 6 ++++--
 ee/api/chalicelib/core/significance.py     | 6 ++++--
 ee/api/chalicelib/core/significance_exp.py | 6 ++++--
 3 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index 64028a8df..fd824509d 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -167,12 +167,14 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
                 values = {**values,
                           **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
                                             value_key=f"value{i + 1}")}
-            if sh.is_negation_operator(op) and i > 0:
+            if sh.is_negation_operator(s["operator"]) and i > 0:
                 op = sh.reverse_sql_operator(op)
                 main_condition = "left_not.session_id ISNULL"
                 extra_from.append(f"""LEFT JOIN LATERAL
                                         (SELECT session_id
                                          FROM {next_table} AS s_main
-                                         WHERE s_main.{next_col_name} {op} %(value{i + 1})s
+                                         WHERE
+                                           {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
+                                                                values=s["value"], value_key=f"value{i + 1}")}
                                            AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                            AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
             else:
diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py
index ae1f0c867..52650bfd7 100644
--- a/ee/api/chalicelib/core/significance.py
+++ b/ee/api/chalicelib/core/significance.py
@@ -173,12 +173,14 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
                 values = {**values,
                           **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
                                             value_key=f"value{i + 1}")}
-            if sh.is_negation_operator(op) and i > 0:
+            if sh.is_negation_operator(s["operator"]) and i > 0:
                 op = sh.reverse_sql_operator(op)
                 main_condition = "left_not.session_id ISNULL"
                 extra_from.append(f"""LEFT JOIN LATERAL
                                         (SELECT session_id
                                          FROM {next_table} AS s_main
-                                         WHERE s_main.{next_col_name} {op} %(value{i + 1})s
+                                         WHERE
+                                           {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
+                                                                values=s["value"], value_key=f"value{i + 1}")}
                                            AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                            AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
             else:
diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py
index ae1f0c867..52650bfd7
100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -173,12 +173,14 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), value_key=f"value{i + 1}")} - if sh.is_negation_operator(op) and i > 0: + if sh.is_negation_operator(s["operator"]) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id FROM {next_table} AS s_main - WHERE s_main.{next_col_name} {op} %(value{i + 1})s + WHERE + {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s", + values=s["value"], value_key=f"value{i + 1}")} AND s_main.timestamp >= T{i}.stage{i}_timestamp AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""") else: From c6aac11cbf41a94e30cec2ea3c070c8668890b5e Mon Sep 17 00:00:00 2001 From: Alexander Date: Thu, 9 Mar 2023 09:54:12 +0100 Subject: [PATCH 214/218] Heuristics refactoring (#987) * feat(backend): refactored heuristics service * feat(backend): refactored db service (moved several events to heuristics) --- backend/cmd/assets/main.go | 4 - backend/cmd/db/main.go | 157 +- backend/cmd/ender/main.go | 10 +- backend/cmd/heuristics/main.go | 79 +- backend/cmd/http/main.go | 4 - backend/cmd/integrations/main.go | 4 - backend/cmd/sink/main.go | 6 +- backend/cmd/storage/main.go | 4 - backend/internal/config/heuristics/config.go | 4 + backend/internal/db/datasaver/messages.go | 74 - backend/internal/db/datasaver/methods.go | 19 + backend/internal/db/datasaver/saver.go | 122 +- backend/internal/db/datasaver/stats.go | 29 - backend/internal/db/service.go | 56 + backend/internal/heuristics/service.go | 64 + backend/internal/service/service.go | 5 + backend/internal/sessionender/ender.go | 4 +- backend/pkg/db/cache/messages-common.go | 6 +- backend/pkg/db/cache/messages-ios.go | 30 +- backend/pkg/db/cache/messages-web.go | 30 +- backend/pkg/db/clickhouse/connector.go | 24 + backend/pkg/db/postgres/batches.go | 2 - backend/pkg/db/postgres/bulks.go | 3 - backend/pkg/db/postgres/connector.go | 2 +- backend/pkg/db/postgres/messages-ios.go | 24 +- backend/pkg/db/postgres/messages-web-stats.go | 22 +- backend/pkg/db/postgres/messages-web.go | 7 +- backend/pkg/db/types/error-event.go | 12 + backend/pkg/handlers/custom/eventMapper.go | 82 - .../pkg/handlers/custom/inputEventBuilder.go | 8 +- .../pkg/handlers/custom/pageEventBuilder.go | 10 +- backend/pkg/handlers/ios/clickRage.go | 2 +- backend/pkg/handlers/messageProcessor.go | 2 +- backend/pkg/handlers/web/clickRage.go | 60 +- backend/pkg/handlers/web/cpuIssue.go | 82 +- backend/pkg/handlers/web/deadClick.go | 63 +- backend/pkg/handlers/web/domDrop.go | 55 - backend/pkg/handlers/web/memoryIssue.go | 11 +- backend/pkg/handlers/web/networkIssue.go | 11 +- .../pkg/handlers/web/performanceAggregator.go | 12 +- backend/pkg/messages/filters.go | 4 +- backend/pkg/messages/iterator-ender.go | 10 +- backend/pkg/messages/iterator-sink.go | 10 +- backend/pkg/messages/iterator.go | 19 +- backend/pkg/messages/message.go | 12 +- backend/pkg/messages/messages.go | 803 +++--- backend/pkg/messages/raw.go | 14 + backend/pkg/messages/read-message.go | 2302 ++++++++--------- backend/pkg/sessions/builder.go | 41 +- backend/pkg/sessions/builderMap.go | 124 +- backend/pkg/terminator/terminator.go | 22 + backend/pkg/url/url.go | 35 +- ee/backend/internal/db/datasaver/fts.go | 106 +- 
ee/backend/internal/db/datasaver/messages.go | 114 - ee/backend/internal/db/datasaver/methods.go | 83 + ee/backend/internal/db/datasaver/saver.go | 24 - ee/backend/internal/db/datasaver/stats.go | 56 - ee/backend/pkg/db/clickhouse/connector.go | 27 +- ee/connectors/msgcodec/messages.py | 36 - ee/connectors/msgcodec/msgcodec.py | 30 - mobs/messages.rb | 29 - 61 files changed, 2342 insertions(+), 2764 deletions(-) delete mode 100644 backend/internal/db/datasaver/messages.go create mode 100644 backend/internal/db/datasaver/methods.go delete mode 100644 backend/internal/db/datasaver/stats.go create mode 100644 backend/internal/db/service.go create mode 100644 backend/internal/heuristics/service.go create mode 100644 backend/internal/service/service.go create mode 100644 backend/pkg/db/clickhouse/connector.go delete mode 100644 backend/pkg/handlers/custom/eventMapper.go delete mode 100644 backend/pkg/handlers/web/domDrop.go create mode 100644 backend/pkg/terminator/terminator.go delete mode 100644 ee/backend/internal/db/datasaver/messages.go create mode 100644 ee/backend/internal/db/datasaver/methods.go delete mode 100644 ee/backend/internal/db/datasaver/saver.go delete mode 100644 ee/backend/internal/db/datasaver/stats.go diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index b05ecbe52..16eac7cb5 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" assetsMetrics "openreplay/backend/pkg/metrics/assets" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) @@ -24,9 +23,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } cacher := cacher.NewCacher(cfg) diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index 84b0d81ed..ae1228cc3 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -1,174 +1,59 @@ package main import ( - "errors" "log" - "os" - "os/signal" - "syscall" - "time" - "openreplay/backend/internal/config/db" + config "openreplay/backend/internal/config/db" + "openreplay/backend/internal/db" "openreplay/backend/internal/db/datasaver" "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" - types2 "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/handlers" - custom2 "openreplay/backend/pkg/handlers/custom" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" databaseMetrics "openreplay/backend/pkg/metrics/database" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" - "openreplay/backend/pkg/sessions" + "openreplay/backend/pkg/terminator" ) func main() { + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) + m := metrics.New() m.Register(databaseMetrics.List()) - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - - cfg := db.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } + cfg := config.New() // Init database pg := cache.NewPGCache( postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit), cfg.ProjectExpirationTimeoutMs) defer pg.Close() - // HandlersFabric returns the list of message handlers we want to be applied to each incoming message. 
- handlersFabric := func() []handlers.MessageProcessor { - return []handlers.MessageProcessor{ - &custom2.EventMapper{}, - custom2.NewInputEventBuilder(), - custom2.NewPageEventBuilder(), - } - } - - // Create handler's aggregator - builderMap := sessions.NewBuilderMap(handlersFabric) - - // Init modules - saver := datasaver.New(pg, cfg) - saver.InitStats() + // Init data saver + saver := datasaver.New(cfg, pg) + // Message filter msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd, - messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgClickEvent, - messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr, - messages.MsgJSException, messages.MsgResourceTiming, - messages.MsgCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, - messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, - messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming} - - // Handler logic - msgHandler := func(msg messages.Message) { - // Just save session data into db without additional checks - if err := saver.InsertMessage(msg); err != nil { - if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) - } - return - } - - var ( - session *types2.Session - err error - ) - if msg.TypeID() == messages.MsgSessionEnd { - session, err = pg.GetSession(msg.SessionID()) - } else { - session, err = pg.Cache.GetSession(msg.SessionID()) - } - if session == nil { - if err != nil && !errors.Is(err, cache.NilSessionInCacheError) { - log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) - } - return - } - - // Save statistics to db - err = saver.InsertStats(session, msg) - if err != nil { - log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg) - } - - // Handle heuristics and save to temporary queue in memory - builderMap.HandleMessage(msg) - - // Process saved heuristics messages as usual messages above in the code - builderMap.IterateSessionReadyMessages(msg.SessionID(), func(msg messages.Message) { - if err := saver.InsertMessage(msg); err != nil { - if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg) - } - return - } - - if err := saver.InsertStats(session, msg); err != nil { - log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg) - } - }) - } + messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr, + messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue, + messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, + messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, messages.MsgMouseClick, + messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming, + messages.MsgInputEvent, messages.MsgPageEvent} // Init consumer consumer := queue.NewConsumer( cfg.GroupDB, []string{ - cfg.TopicRawWeb, // from tracker - cfg.TopicAnalytics, // from heuristics + cfg.TopicRawWeb, + cfg.TopicAnalytics, }, - messages.NewMessageIterator(msgHandler, msgFilter, true), + messages.NewMessageIterator(saver.Handle, msgFilter, true), false, cfg.MessageSizeLimit, ) + // 
Run service and wait for TERM signal + service := db.New(cfg, consumer, saver) log.Printf("Db service started\n") - - sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) - - commitTick := time.Tick(cfg.CommitBatchTimeout) - - // Send collected batches to db - commitDBUpdates := func() { - // Commit collected batches and bulks of information to PG - pg.Commit() - // Commit collected batches of information to CH - if err := saver.CommitStats(); err != nil { - log.Printf("Error on stats commit: %v", err) - } - // Commit current position in queue - if err := consumer.Commit(); err != nil { - log.Printf("Error on consumer commit: %v", err) - } - } - - for { - select { - case sig := <-sigchan: - log.Printf("Caught signal %s: terminating\n", sig.String()) - commitDBUpdates() - if err := pg.Close(); err != nil { - log.Printf("db.Close error: %s", err) - } - if err := saver.Close(); err != nil { - log.Printf("saver.Close error: %s", err) - } - consumer.Close() - os.Exit(0) - case <-commitTick: - commitDBUpdates() - builderMap.ClearOldSessions() - case msg := <-consumer.Rebalanced(): - log.Println(msg) - default: - // Handle new message from queue - if err := consumer.ConsumeNext(); err != nil { - log.Fatalf("Error on consumption: %v", err) - } - } - } + terminator.Wait(service) } diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index da7ca9b89..84d816a33 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -18,7 +18,6 @@ import ( "openreplay/backend/pkg/metrics" databaseMetrics "openreplay/backend/pkg/metrics/database" enderMetrics "openreplay/backend/pkg/metrics/ender" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) @@ -30,9 +29,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := ender.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), cfg.ProjectExpirationTimeoutMs) defer pg.Close() @@ -72,12 +68,12 @@ func main() { consumer.Close() os.Exit(0) case <-tick: - failedSessionEnds := make(map[uint64]int64) + failedSessionEnds := make(map[uint64]uint64) duplicatedSessionEnds := make(map[uint64]uint64) // Find ended sessions and send notification to other services - sessions.HandleEndedSessions(func(sessionID uint64, timestamp int64) bool { - msg := &messages.SessionEnd{Timestamp: uint64(timestamp)} + sessions.HandleEndedSessions(func(sessionID uint64, timestamp uint64) bool { + msg := &messages.SessionEnd{Timestamp: timestamp} currDuration, err := pg.GetSessionDuration(sessionID) if err != nil { log.Printf("getSessionDuration failed, sessID: %d, err: %s", sessionID, err) diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go index ac55b83bc..073f48611 100644 --- a/backend/cmd/heuristics/main.go +++ b/backend/cmd/heuristics/main.go @@ -2,90 +2,49 @@ package main import ( "log" - "openreplay/backend/pkg/pprof" - "os" - "os/signal" - "syscall" - "time" - - "openreplay/backend/internal/config/heuristics" + config "openreplay/backend/internal/config/heuristics" + "openreplay/backend/internal/heuristics" "openreplay/backend/pkg/handlers" - web2 "openreplay/backend/pkg/handlers/web" - "openreplay/backend/pkg/intervals" + "openreplay/backend/pkg/handlers/custom" + "openreplay/backend/pkg/handlers/web" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/sessions" + "openreplay/backend/pkg/terminator" ) func main() { log.SetFlags(log.LstdFlags | 
log.LUTC | log.Llongfile) - - cfg := heuristics.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } + cfg := config.New() // HandlersFabric returns the list of message handlers we want to be applied to each incoming message. handlersFabric := func() []handlers.MessageProcessor { return []handlers.MessageProcessor{ - // web handlers - &web2.ClickRageDetector{}, - &web2.CpuIssueDetector{}, - &web2.DeadClickDetector{}, - &web2.MemoryIssueDetector{}, - &web2.NetworkIssueDetector{}, - &web2.PerformanceAggregator{}, - // Other handlers (you can add your custom handlers here) - //&custom.CustomHandler{}, + custom.NewInputEventBuilder(), + custom.NewPageEventBuilder(), + web.NewDeadClickDetector(), + &web.ClickRageDetector{}, + &web.CpuIssueDetector{}, + &web.MemoryIssueDetector{}, + &web.NetworkIssueDetector{}, + &web.PerformanceAggregator{}, } } - // Create handler's aggregator - builderMap := sessions.NewBuilderMap(handlersFabric) - - // Init producer and consumer for data bus + eventBuilder := sessions.NewBuilderMap(handlersFabric) producer := queue.NewProducer(cfg.MessageSizeLimit, true) - - msgHandler := func(msg messages.Message) { - builderMap.HandleMessage(msg) - } - consumer := queue.NewConsumer( cfg.GroupHeuristics, []string{ cfg.TopicRawWeb, }, - messages.NewMessageIterator(msgHandler, nil, true), + messages.NewMessageIterator(eventBuilder.HandleMessage, nil, true), false, cfg.MessageSizeLimit, ) + // Run service and wait for TERM signal + service := heuristics.New(cfg, producer, consumer, eventBuilder) log.Printf("Heuristics service started\n") - - sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) - - tick := time.Tick(intervals.EVENTS_COMMIT_INTERVAL * time.Millisecond) - for { - select { - case sig := <-sigchan: - log.Printf("Caught signal %v: terminating\n", sig) - producer.Close(cfg.ProducerTimeout) - consumer.Commit() - consumer.Close() - os.Exit(0) - case <-tick: - builderMap.IterateReadyMessages(func(sessionID uint64, readyMsg messages.Message) { - producer.Produce(cfg.TopicAnalytics, sessionID, readyMsg.Encode()) - }) - producer.Flush(cfg.ProducerTimeout) - consumer.Commit() - case msg := <-consumer.Rebalanced(): - log.Println(msg) - default: - if err := consumer.ConsumeNext(); err != nil { - log.Fatalf("Error on consuming: %v", err) - } - } - } + terminator.Wait(service) } diff --git a/backend/cmd/http/main.go b/backend/cmd/http/main.go index 83eedaf29..74c58f92b 100644 --- a/backend/cmd/http/main.go +++ b/backend/cmd/http/main.go @@ -15,7 +15,6 @@ import ( "openreplay/backend/pkg/metrics" databaseMetrics "openreplay/backend/pkg/metrics/database" httpMetrics "openreplay/backend/pkg/metrics/http" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" ) @@ -27,9 +26,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := http.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } // Connect to queue producer := queue.NewProducer(cfg.MessageSizeLimit, true) diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 3fa07ee9c..c179650b9 100644 --- a/backend/cmd/integrations/main.go +++ b/backend/cmd/integrations/main.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/intervals" "openreplay/backend/pkg/metrics" databaseMetrics "openreplay/backend/pkg/metrics/database" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/token" ) @@ -25,9 +24,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg 
:= config.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } pg := postgres.NewConn(cfg.Postgres.String(), 0, 0) defer pg.Close() diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 4bbaeeee4..e9cf1367a 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -16,7 +16,6 @@ import ( "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" sinkMetrics "openreplay/backend/pkg/metrics/sink" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" ) @@ -27,9 +26,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := sink.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } if _, err := os.Stat(cfg.FsDir); os.IsNotExist(err) { log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err) @@ -112,7 +108,7 @@ func main() { log.Printf("zero ts; sessID: %d, msgType: %d", msg.SessionID(), msg.TypeID()) } else { // Log ts of last processed message - counter.Update(msg.SessionID(), time.UnixMilli(ts)) + counter.Update(msg.SessionID(), time.UnixMilli(int64(ts))) } // Try to encode message to avoid null data inserts diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index 472324b95..2a1f6a402 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" storageMetrics "openreplay/backend/pkg/metrics/storage" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" cloud "openreplay/backend/pkg/storage" ) @@ -25,9 +24,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } s3 := cloud.NewS3(cfg.S3Region, cfg.S3Bucket) srv, err := storage.New(cfg, s3) diff --git a/backend/internal/config/heuristics/config.go b/backend/internal/config/heuristics/config.go index 6552944a3..d222387c5 100644 --- a/backend/internal/config/heuristics/config.go +++ b/backend/internal/config/heuristics/config.go @@ -3,6 +3,7 @@ package heuristics import ( "openreplay/backend/internal/config/common" "openreplay/backend/internal/config/configurator" + "openreplay/backend/pkg/pprof" ) type Config struct { @@ -19,5 +20,8 @@ type Config struct { func New() *Config { cfg := &Config{} configurator.Process(cfg) + if cfg.UseProfiler { + pprof.StartProfilingServer() + } return cfg } diff --git a/backend/internal/db/datasaver/messages.go b/backend/internal/db/datasaver/messages.go deleted file mode 100644 index 12e7152b4..000000000 --- a/backend/internal/db/datasaver/messages.go +++ /dev/null @@ -1,74 +0,0 @@ -package datasaver - -import ( - "fmt" - . 
"openreplay/backend/pkg/messages" -) - -func (mi *Saver) InsertMessage(msg Message) error { - sessionID := msg.SessionID() - switch m := msg.(type) { - // Common - case *Metadata: - if err := mi.pg.InsertMetadata(sessionID, m); err != nil { - return fmt.Errorf("insert metadata err: %s", err) - } - return nil - case *IssueEvent: - return mi.pg.InsertIssueEvent(sessionID, m) - //TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others - - // Web - case *SessionStart: - return mi.pg.HandleWebSessionStart(sessionID, m) - case *SessionEnd: - return mi.pg.HandleWebSessionEnd(sessionID, m) - case *UserID: - return mi.pg.InsertWebUserID(sessionID, m) - case *UserAnonymousID: - return mi.pg.InsertWebUserAnonymousID(sessionID, m) - case *CustomEvent: - return mi.pg.InsertWebCustomEvent(sessionID, m) - case *ClickEvent: - return mi.pg.InsertWebClickEvent(sessionID, m) - case *InputEvent: - return mi.pg.InsertWebInputEvent(sessionID, m) - - // Unique Web messages - case *PageEvent: - return mi.pg.InsertWebPageEvent(sessionID, m) - case *NetworkRequest: - return mi.pg.InsertWebNetworkRequest(sessionID, m) - case *GraphQL: - return mi.pg.InsertWebGraphQL(sessionID, m) - case *JSException: - return mi.pg.InsertWebJSException(m) - case *IntegrationEvent: - return mi.pg.InsertWebIntegrationEvent(m) - - // IOS - case *IOSSessionStart: - return mi.pg.InsertIOSSessionStart(sessionID, m) - case *IOSSessionEnd: - return mi.pg.InsertIOSSessionEnd(sessionID, m) - case *IOSUserID: - return mi.pg.InsertIOSUserID(sessionID, m) - case *IOSUserAnonymousID: - return mi.pg.InsertIOSUserAnonymousID(sessionID, m) - case *IOSCustomEvent: - return mi.pg.InsertIOSCustomEvent(sessionID, m) - case *IOSClickEvent: - return mi.pg.InsertIOSClickEvent(sessionID, m) - case *IOSInputEvent: - return mi.pg.InsertIOSInputEvent(sessionID, m) - // Unique IOS messages - case *IOSNetworkCall: - return mi.pg.InsertIOSNetworkCall(sessionID, m) - case *IOSScreenEnter: - return mi.pg.InsertIOSScreenEnter(sessionID, m) - case *IOSCrash: - return mi.pg.InsertIOSCrash(sessionID, m) - - } - return nil // "Not implemented" -} diff --git a/backend/internal/db/datasaver/methods.go b/backend/internal/db/datasaver/methods.go new file mode 100644 index 000000000..c4e83cf09 --- /dev/null +++ b/backend/internal/db/datasaver/methods.go @@ -0,0 +1,19 @@ +package datasaver + +import ( + . "openreplay/backend/pkg/messages" +) + +func (s *saverImpl) init() { + // noop +} + +func (s *saverImpl) handleExtraMessage(msg Message) error { + switch m := msg.(type) { + case *PerformanceTrackAggr: + return s.pg.InsertWebStatsPerformance(m) + case *ResourceTiming: + return s.pg.InsertWebStatsResourceEvent(m) + } + return nil +} diff --git a/backend/internal/db/datasaver/saver.go b/backend/internal/db/datasaver/saver.go index 2a356d120..92dbff958 100644 --- a/backend/internal/db/datasaver/saver.go +++ b/backend/internal/db/datasaver/saver.go @@ -1,16 +1,126 @@ package datasaver import ( + "log" + "openreplay/backend/internal/config/db" "openreplay/backend/pkg/db/cache" - "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/db/clickhouse" + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/db/types" + . 
"openreplay/backend/pkg/messages" + queue "openreplay/backend/pkg/queue/types" ) -type Saver struct { - pg *cache.PGCache - producer types.Producer +type Saver interface { + Handle(msg Message) + Commit() error + Close() error } -func New(pg *cache.PGCache, _ *db.Config) *Saver { - return &Saver{pg: pg, producer: nil} +type saverImpl struct { + cfg *db.Config + pg *cache.PGCache + ch clickhouse.Connector + producer queue.Producer +} + +func New(cfg *db.Config, pg *cache.PGCache) Saver { + s := &saverImpl{cfg: cfg, pg: pg} + s.init() + return s +} + +func (s *saverImpl) Handle(msg Message) { + if msg.TypeID() == MsgCustomEvent { + defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent))) + } + if err := s.handleMessage(msg); err != nil { + if !postgres.IsPkeyViolation(err) { + log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) + } + return + } + if err := s.handleExtraMessage(msg); err != nil { + log.Printf("Stats Insertion Error %v; Session: %d, Message: %v", err, msg.SessionID(), msg) + } + return +} + +func (s *saverImpl) handleMessage(msg Message) error { + switch m := msg.(type) { + case *Metadata: + return s.pg.InsertMetadata(m) + case *IssueEvent: + return s.pg.InsertIssueEvent(m) + case *SessionStart: + return s.pg.HandleWebSessionStart(m) + case *SessionEnd: + return s.pg.HandleWebSessionEnd(m) + case *UserID: + return s.pg.InsertWebUserID(m) + case *UserAnonymousID: + return s.pg.InsertWebUserAnonymousID(m) + case *CustomEvent: + return s.pg.InsertWebCustomEvent(m) + case *MouseClick: + return s.pg.InsertWebClickEvent(m) + case *InputEvent: + return s.pg.InsertWebInputEvent(m) + case *PageEvent: + return s.pg.InsertWebPageEvent(m) + case *NetworkRequest: + return s.pg.InsertWebNetworkRequest(m) + case *GraphQL: + return s.pg.InsertWebGraphQL(m) + case *JSException: + return s.pg.InsertWebJSException(m) + case *IntegrationEvent: + return s.pg.InsertWebIntegrationEvent(m) + case *IOSSessionStart: + return s.pg.InsertIOSSessionStart(m) + case *IOSSessionEnd: + return s.pg.InsertIOSSessionEnd(m) + case *IOSUserID: + return s.pg.InsertIOSUserID(m) + case *IOSUserAnonymousID: + return s.pg.InsertIOSUserAnonymousID(m) + case *IOSCustomEvent: + return s.pg.InsertIOSCustomEvent(m) + case *IOSClickEvent: + return s.pg.InsertIOSClickEvent(m) + case *IOSInputEvent: + return s.pg.InsertIOSInputEvent(m) + case *IOSNetworkCall: + return s.pg.InsertIOSNetworkCall(m) + case *IOSScreenEnter: + return s.pg.InsertIOSScreenEnter(m) + case *IOSCrash: + return s.pg.InsertIOSCrash(m) + } + return nil +} + +func (s *saverImpl) Commit() error { + if s.pg != nil { + s.pg.Commit() + } + if s.ch != nil { + s.ch.Commit() + } + return nil +} + +func (s *saverImpl) Close() error { + if s.pg != nil { + if err := s.pg.Close(); err != nil { + log.Printf("pg.Close error: %s", err) + } + } + if s.ch != nil { + if err := s.ch.Stop(); err != nil { + log.Printf("ch.Close error: %s", err) + } + } + return nil } diff --git a/backend/internal/db/datasaver/stats.go b/backend/internal/db/datasaver/stats.go deleted file mode 100644 index c7daeb3dc..000000000 --- a/backend/internal/db/datasaver/stats.go +++ /dev/null @@ -1,29 +0,0 @@ -package datasaver - -import ( - . "openreplay/backend/pkg/db/types" - . 
"openreplay/backend/pkg/messages" -) - -func (si *Saver) InitStats() { - // noop -} - -func (si *Saver) InsertStats(session *Session, msg Message) error { - switch m := msg.(type) { - // Web - case *PerformanceTrackAggr: - return si.pg.InsertWebStatsPerformance(session.SessionID, m) - case *ResourceEvent: - return si.pg.InsertWebStatsResourceEvent(session.SessionID, m) - } - return nil -} - -func (si *Saver) CommitStats() error { - return nil -} - -func (si *Saver) Close() error { - return nil -} diff --git a/backend/internal/db/service.go b/backend/internal/db/service.go new file mode 100644 index 000000000..69b5cb1cb --- /dev/null +++ b/backend/internal/db/service.go @@ -0,0 +1,56 @@ +package db + +import ( + "log" + "time" + + "openreplay/backend/internal/config/db" + "openreplay/backend/internal/db/datasaver" + "openreplay/backend/internal/service" + "openreplay/backend/pkg/queue/types" +) + +type dbImpl struct { + cfg *db.Config + consumer types.Consumer + saver datasaver.Saver +} + +func New(cfg *db.Config, consumer types.Consumer, saver datasaver.Saver) service.Interface { + s := &dbImpl{ + cfg: cfg, + consumer: consumer, + saver: saver, + } + go s.run() + return s +} + +func (d *dbImpl) run() { + commitTick := time.Tick(d.cfg.CommitBatchTimeout) + for { + select { + case <-commitTick: + d.commit() + case msg := <-d.consumer.Rebalanced(): + log.Println(msg) + default: + if err := d.consumer.ConsumeNext(); err != nil { + log.Fatalf("Error on consumption: %v", err) + } + } + } +} + +func (d *dbImpl) commit() { + d.saver.Commit() + d.consumer.Commit() +} + +func (d *dbImpl) Stop() { + d.commit() + if err := d.saver.Close(); err != nil { + log.Printf("saver.Close error: %s", err) + } + d.consumer.Close() +} diff --git a/backend/internal/heuristics/service.go b/backend/internal/heuristics/service.go new file mode 100644 index 000000000..0063f79f7 --- /dev/null +++ b/backend/internal/heuristics/service.go @@ -0,0 +1,64 @@ +package heuristics + +import ( + "log" + "time" + + "openreplay/backend/internal/config/heuristics" + "openreplay/backend/internal/service" + "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/sessions" +) + +type heuristicsImpl struct { + cfg *heuristics.Config + producer types.Producer + consumer types.Consumer + events sessions.EventBuilder +} + +func New(cfg *heuristics.Config, p types.Producer, c types.Consumer, e sessions.EventBuilder) service.Interface { + s := &heuristicsImpl{ + cfg: cfg, + producer: p, + consumer: c, + events: e, + } + go s.run() + return s +} + +func (h *heuristicsImpl) run() { + tick := time.Tick(10 * time.Second) + for { + select { + case evt := <-h.events.Events(): + if err := h.producer.Produce(h.cfg.TopicAnalytics, evt.SessionID(), evt.Encode()); err != nil { + log.Printf("can't send new event to queue: %s", err) + } + case <-tick: + h.producer.Flush(h.cfg.ProducerTimeout) + h.consumer.Commit() + case msg := <-h.consumer.Rebalanced(): + log.Println(msg) + default: + if err := h.consumer.ConsumeNext(); err != nil { + log.Fatalf("Error on consuming: %v", err) + } + } + } +} + +func (h *heuristicsImpl) Stop() { + // Stop event builder and flush all events + log.Println("stopping heuristics service") + h.events.Stop() + for evt := range h.events.Events() { + if err := h.producer.Produce(h.cfg.TopicAnalytics, evt.SessionID(), evt.Encode()); err != nil { + log.Printf("can't send new event to queue: %s", err) + } + } + h.producer.Close(h.cfg.ProducerTimeout) + h.consumer.Commit() + h.consumer.Close() +} diff --git 
a/backend/internal/service/service.go b/backend/internal/service/service.go new file mode 100644 index 000000000..a20254093 --- /dev/null +++ b/backend/internal/service/service.go @@ -0,0 +1,5 @@ +package service + +type Interface interface { + Stop() +} diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index e1ddb0ffe..26fcf850e 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -9,13 +9,13 @@ import ( ) // EndedSessionHandler handler for ended sessions -type EndedSessionHandler func(sessionID uint64, timestamp int64) bool +type EndedSessionHandler func(sessionID uint64, timestamp uint64) bool // session holds information about user's session live status type session struct { lastTimestamp int64 lastUpdate int64 - lastUserTime int64 + lastUserTime uint64 isEnded bool } diff --git a/backend/pkg/db/cache/messages-common.go b/backend/pkg/db/cache/messages-common.go index 3fc52f395..763f97d90 100644 --- a/backend/pkg/db/cache/messages-common.go +++ b/backend/pkg/db/cache/messages-common.go @@ -21,7 +21,8 @@ func (c *PGCache) HandleSessionEnd(sessionID uint64) error { return nil } -func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { +func (c *PGCache) InsertIssueEvent(crash *IssueEvent) error { + sessionID := crash.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -29,7 +30,8 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash) } -func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { +func (c *PGCache) InsertMetadata(metadata *Metadata) error { + sessionID := metadata.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err diff --git a/backend/pkg/db/cache/messages-ios.go b/backend/pkg/db/cache/messages-ios.go index 961b78dad..93367f925 100644 --- a/backend/pkg/db/cache/messages-ios.go +++ b/backend/pkg/db/cache/messages-ios.go @@ -6,7 +6,8 @@ import ( . 
"openreplay/backend/pkg/messages" ) -func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) error { +func (c *PGCache) InsertIOSSessionStart(s *IOSSessionStart) error { + sessionID := s.SessionID() if c.Cache.HasSession(sessionID) { return fmt.Errorf("session %d already in cache", sessionID) } @@ -33,13 +34,15 @@ func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) er return nil } -func (c *PGCache) InsertIOSSessionEnd(sessionID uint64, e *IOSSessionEnd) error { +func (c *PGCache) InsertIOSSessionEnd(e *IOSSessionEnd) error { + sessionID := e.SessionID() _, err := c.InsertSessionEnd(sessionID, e.Timestamp) return err } -func (c *PGCache) InsertIOSScreenEnter(sessionID uint64, screenEnter *IOSScreenEnter) error { - if err := c.Conn.InsertIOSScreenEnter(sessionID, screenEnter); err != nil { +func (c *PGCache) InsertIOSScreenEnter(screenEnter *IOSScreenEnter) error { + sessionID := screenEnter.SessionID() + if err := c.Conn.InsertIOSScreenEnter(screenEnter); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -50,8 +53,9 @@ func (c *PGCache) InsertIOSScreenEnter(sessionID uint64, screenEnter *IOSScreenE return nil } -func (c *PGCache) InsertIOSClickEvent(sessionID uint64, clickEvent *IOSClickEvent) error { - if err := c.Conn.InsertIOSClickEvent(sessionID, clickEvent); err != nil { +func (c *PGCache) InsertIOSClickEvent(clickEvent *IOSClickEvent) error { + sessionID := clickEvent.SessionID() + if err := c.Conn.InsertIOSClickEvent(clickEvent); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -62,8 +66,9 @@ func (c *PGCache) InsertIOSClickEvent(sessionID uint64, clickEvent *IOSClickEven return nil } -func (c *PGCache) InsertIOSInputEvent(sessionID uint64, inputEvent *IOSInputEvent) error { - if err := c.Conn.InsertIOSInputEvent(sessionID, inputEvent); err != nil { +func (c *PGCache) InsertIOSInputEvent(inputEvent *IOSInputEvent) error { + sessionID := inputEvent.SessionID() + if err := c.Conn.InsertIOSInputEvent(inputEvent); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -74,18 +79,15 @@ func (c *PGCache) InsertIOSInputEvent(sessionID uint64, inputEvent *IOSInputEven return nil } -func (c *PGCache) InsertIOSCrash(sessionID uint64, crash *IOSCrash) error { +func (c *PGCache) InsertIOSCrash(crash *IOSCrash) error { + sessionID := crash.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err } - if err := c.Conn.InsertIOSCrash(sessionID, session.ProjectID, crash); err != nil { + if err := c.Conn.InsertIOSCrash(session.ProjectID, crash); err != nil { return err } session.ErrorsCount += 1 return nil } - -func (c *PGCache) InsertIOSIssueEvent(sessionID uint64, issueEvent *IOSIssueEvent) error { - return nil -} diff --git a/backend/pkg/db/cache/messages-web.go b/backend/pkg/db/cache/messages-web.go index 1df3d1520..0a870e5a2 100644 --- a/backend/pkg/db/cache/messages-web.go +++ b/backend/pkg/db/cache/messages-web.go @@ -30,7 +30,8 @@ func (c *PGCache) InsertWebSessionStart(sessionID uint64, s *SessionStart) error }) } -func (c *PGCache) HandleWebSessionStart(sessionID uint64, s *SessionStart) error { +func (c *PGCache) HandleWebSessionStart(s *SessionStart) error { + sessionID := s.SessionID() if c.Cache.HasSession(sessionID) { return fmt.Errorf("session %d already in cache", sessionID) } @@ -69,7 +70,8 @@ func (c *PGCache) InsertWebSessionEnd(sessionID uint64, e *SessionEnd) error { return err } -func (c *PGCache) HandleWebSessionEnd(sessionID 
uint64, e *SessionEnd) error { +func (c *PGCache) HandleWebSessionEnd(e *SessionEnd) error { + sessionID := e.SessionID() return c.HandleSessionEnd(sessionID) } @@ -99,7 +101,8 @@ func (c *PGCache) InsertSessionReferrer(sessionID uint64, referrer string) error return c.Conn.InsertSessionReferrer(sessionID, referrer) } -func (c *PGCache) InsertWebNetworkRequest(sessionID uint64, e *NetworkRequest) error { +func (c *PGCache) InsertWebNetworkRequest(e *NetworkRequest) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -111,7 +114,8 @@ func (c *PGCache) InsertWebNetworkRequest(sessionID uint64, e *NetworkRequest) e return c.Conn.InsertWebNetworkRequest(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } -func (c *PGCache) InsertWebGraphQL(sessionID uint64, e *GraphQL) error { +func (c *PGCache) InsertWebGraphQL(e *GraphQL) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -123,7 +127,8 @@ func (c *PGCache) InsertWebGraphQL(sessionID uint64, e *GraphQL) error { return c.Conn.InsertWebGraphQL(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } -func (c *PGCache) InsertWebCustomEvent(sessionID uint64, e *CustomEvent) error { +func (c *PGCache) InsertWebCustomEvent(e *CustomEvent) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -131,7 +136,8 @@ func (c *PGCache) InsertWebCustomEvent(sessionID uint64, e *CustomEvent) error { return c.Conn.InsertWebCustomEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebUserID(sessionID uint64, userID *UserID) error { +func (c *PGCache) InsertWebUserID(userID *UserID) error { + sessionID := userID.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -139,7 +145,8 @@ func (c *PGCache) InsertWebUserID(sessionID uint64, userID *UserID) error { return c.Conn.InsertWebUserID(sessionID, session.ProjectID, userID) } -func (c *PGCache) InsertWebUserAnonymousID(sessionID uint64, userAnonymousID *UserAnonymousID) error { +func (c *PGCache) InsertWebUserAnonymousID(userAnonymousID *UserAnonymousID) error { + sessionID := userAnonymousID.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -147,7 +154,8 @@ func (c *PGCache) InsertWebUserAnonymousID(sessionID uint64, userAnonymousID *Us return c.Conn.InsertWebUserAnonymousID(sessionID, session.ProjectID, userAnonymousID) } -func (c *PGCache) InsertWebPageEvent(sessionID uint64, e *PageEvent) error { +func (c *PGCache) InsertWebPageEvent(e *PageEvent) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -155,7 +163,8 @@ func (c *PGCache) InsertWebPageEvent(sessionID uint64, e *PageEvent) error { return c.Conn.InsertWebPageEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error { +func (c *PGCache) InsertWebClickEvent(e *MouseClick) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -163,7 +172,8 @@ func (c *PGCache) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error { return c.Conn.InsertWebClickEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebInputEvent(sessionID uint64, e *InputEvent) error { +func (c *PGCache) InsertWebInputEvent(e *InputEvent) error { + sessionID := e.SessionID() session, err := 
c.Cache.GetSession(sessionID) if err != nil { return err diff --git a/backend/pkg/db/clickhouse/connector.go b/backend/pkg/db/clickhouse/connector.go new file mode 100644 index 000000000..1d3a3b4f5 --- /dev/null +++ b/backend/pkg/db/clickhouse/connector.go @@ -0,0 +1,24 @@ +package clickhouse + +import ( + "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/messages" +) + +type Connector interface { + Prepare() error + Commit() error + Stop() error + InsertWebSession(session *types.Session) error + InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error + InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error + InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error + InsertWebInputEvent(session *types.Session, msg *messages.InputEvent) error + InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error + InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error + InsertAutocomplete(session *types.Session, msgType, msgValue string) error + InsertRequest(session *types.Session, msg *messages.NetworkRequest, savePayload bool) error + InsertCustom(session *types.Session, msg *messages.CustomEvent) error + InsertGraphQL(session *types.Session, msg *messages.GraphQL) error + InsertIssue(session *types.Session, msg *messages.IssueEvent) error +} diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index 8b9f2484d..bf4d1745c 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -193,9 +193,7 @@ func (conn *BatchSet) worker() { for { select { case t := <-conn.workerTask: - start := time.Now() conn.sendBatches(t) - log.Printf("pg batches dur: %d", time.Now().Sub(start).Milliseconds()) case <-conn.done: if len(conn.workerTask) > 0 { for t := range conn.workerTask { diff --git a/backend/pkg/db/postgres/bulks.go b/backend/pkg/db/postgres/bulks.go index f3e9e95c9..27ab2cafd 100644 --- a/backend/pkg/db/postgres/bulks.go +++ b/backend/pkg/db/postgres/bulks.go @@ -2,7 +2,6 @@ package postgres import ( "log" - "time" ) type bulksTask struct { @@ -243,9 +242,7 @@ func (conn *BulkSet) worker() { for { select { case t := <-conn.workerTask: - start := time.Now() conn.sendBulks(t) - log.Printf("pg bulks dur: %d", time.Now().Sub(start).Milliseconds()) case <-conn.done: if len(conn.workerTask) > 0 { for t := range conn.workerTask { diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 6904dc135..be748e6a2 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -17,7 +17,7 @@ type Conn struct { c Pool batches *BatchSet bulks *BulkSet - chConn CH + chConn CH // hack for autocomplete inserts, TODO: rewrite } func (conn *Conn) SetClickHouse(ch CH) { diff --git a/backend/pkg/db/postgres/messages-ios.go b/backend/pkg/db/postgres/messages-ios.go index 027cfc968..ace1955f5 100644 --- a/backend/pkg/db/postgres/messages-ios.go +++ b/backend/pkg/db/postgres/messages-ios.go @@ -6,7 +6,8 @@ import ( "openreplay/backend/pkg/url" ) -func (conn *Conn) InsertIOSCustomEvent(sessionID uint64, e *messages.IOSCustomEvent) error { +func (conn *Conn) InsertIOSCustomEvent(e *messages.IOSCustomEvent) error { + sessionID := e.SessionID() err := conn.InsertCustomEvent(sessionID, e.Timestamp, truncSqIdx(e.Index), e.Name, e.Payload) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "CUSTOM_IOS", e.Name) @@ -14,7 +15,8 @@ func (conn *Conn) 
InsertIOSCustomEvent(sessionID uint64, e *messages.IOSCustomEv return err } -func (conn *Conn) InsertIOSUserID(sessionID uint64, userID *messages.IOSUserID) error { +func (conn *Conn) InsertIOSUserID(userID *messages.IOSUserID) error { + sessionID := userID.SessionID() err := conn.InsertUserID(sessionID, userID.Value) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "USERID_IOS", userID.Value) @@ -22,7 +24,8 @@ func (conn *Conn) InsertIOSUserID(sessionID uint64, userID *messages.IOSUserID) return err } -func (conn *Conn) InsertIOSUserAnonymousID(sessionID uint64, userAnonymousID *messages.IOSUserAnonymousID) error { +func (conn *Conn) InsertIOSUserAnonymousID(userAnonymousID *messages.IOSUserAnonymousID) error { + sessionID := userAnonymousID.SessionID() err := conn.InsertUserAnonymousID(sessionID, userAnonymousID.Value) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "USERANONYMOUSID_IOS", userAnonymousID.Value) @@ -30,7 +33,8 @@ func (conn *Conn) InsertIOSUserAnonymousID(sessionID uint64, userAnonymousID *me return err } -func (conn *Conn) InsertIOSNetworkCall(sessionID uint64, e *messages.IOSNetworkCall) error { +func (conn *Conn) InsertIOSNetworkCall(e *messages.IOSNetworkCall) error { + sessionID := e.SessionID() err := conn.InsertRequest(sessionID, e.Timestamp, truncSqIdx(e.Index), e.URL, e.Duration, e.Success) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "REQUEST_IOS", url.DiscardURLQuery(e.URL)) @@ -38,7 +42,8 @@ func (conn *Conn) InsertIOSNetworkCall(sessionID uint64, e *messages.IOSNetworkC return err } -func (conn *Conn) InsertIOSScreenEnter(sessionID uint64, screenEnter *messages.IOSScreenEnter) error { +func (conn *Conn) InsertIOSScreenEnter(screenEnter *messages.IOSScreenEnter) error { + sessionID := screenEnter.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -69,7 +74,8 @@ func (conn *Conn) InsertIOSScreenEnter(sessionID uint64, screenEnter *messages.I return nil } -func (conn *Conn) InsertIOSClickEvent(sessionID uint64, clickEvent *messages.IOSClickEvent) error { +func (conn *Conn) InsertIOSClickEvent(clickEvent *messages.IOSClickEvent) error { + sessionID := clickEvent.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -100,7 +106,8 @@ func (conn *Conn) InsertIOSClickEvent(sessionID uint64, clickEvent *messages.IOS return nil } -func (conn *Conn) InsertIOSInputEvent(sessionID uint64, inputEvent *messages.IOSInputEvent) error { +func (conn *Conn) InsertIOSInputEvent(inputEvent *messages.IOSInputEvent) error { + sessionID := inputEvent.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -137,7 +144,8 @@ func (conn *Conn) InsertIOSInputEvent(sessionID uint64, inputEvent *messages.IOS return nil } -func (conn *Conn) InsertIOSCrash(sessionID uint64, projectID uint32, crash *messages.IOSCrash) error { +func (conn *Conn) InsertIOSCrash(projectID uint32, crash *messages.IOSCrash) error { + sessionID := crash.SessionID() tx, err := conn.c.Begin() if err != nil { return err diff --git a/backend/pkg/db/postgres/messages-web-stats.go b/backend/pkg/db/postgres/messages-web-stats.go index 42458a497..47bd06974 100644 --- a/backend/pkg/db/postgres/messages-web-stats.go +++ b/backend/pkg/db/postgres/messages-web-stats.go @@ -5,7 +5,8 @@ import ( "openreplay/backend/pkg/url" ) -func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrackAggr) error { +func (conn *Conn) InsertWebStatsPerformance(p *PerformanceTrackAggr) error { + sessionID := p.SessionID() timestamp := (p.TimestampEnd 
+ p.TimestampStart) / 2 sqlRequest := ` @@ -35,40 +36,37 @@ func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrac return nil } -func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent) error { +func (conn *Conn) InsertWebStatsResourceEvent(e *ResourceTiming) error { + sessionID := e.SessionID() host, _, _, err := url.GetURLParts(e.URL) if err != nil { return err } - + msgType := url.GetResourceType(e.Initiator, e.URL) sqlRequest := ` INSERT INTO events.resources ( session_id, timestamp, message_id, type, url, url_host, url_hostpath, success, status, - method, duration, ttfb, header_size, encoded_body_size, decoded_body_size ) VALUES ( $1, $2, $3, $4, LEFT($5, 8000), LEFT($6, 300), LEFT($7, 2000), $8, $9, - NULLIF($10, '')::events.resource_method, - NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0), NULLIF($15, 0) + NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0) )` urlQuery := url.DiscardURLQuery(e.URL) - urlMethod := url.EnsureMethod(e.Method) conn.batchQueue(sessionID, sqlRequest, - sessionID, e.Timestamp, truncSqIdx(e.MessageID), - e.Type, + sessionID, e.Timestamp, truncSqIdx(e.MsgID()), + msgType, e.URL, host, urlQuery, - e.Success, e.Status, - urlMethod, + e.Duration != 0, 0, e.Duration, e.TTFB, e.HeaderSize, e.EncodedBodySize, e.DecodedBodySize, ) // Record approximate message size - conn.updateBatchSize(sessionID, len(sqlRequest)+len(e.Type)+len(e.URL)+len(host)+len(urlQuery)+len(urlMethod)+8*9+1) + conn.updateBatchSize(sessionID, len(sqlRequest)+len(msgType)+len(e.URL)+len(host)+len(urlQuery)+8*9+1) return nil } diff --git a/backend/pkg/db/postgres/messages-web.go b/backend/pkg/db/postgres/messages-web.go index 08db4491e..9251a4924 100644 --- a/backend/pkg/db/postgres/messages-web.go +++ b/backend/pkg/db/postgres/messages-web.go @@ -57,10 +57,13 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, projectID uint32, e *Page return nil } -func (conn *Conn) InsertWebClickEvent(sessionID uint64, projectID uint32, e *ClickEvent) error { +func (conn *Conn) InsertWebClickEvent(sessionID uint64, projectID uint32, e *MouseClick) error { + if e.Label == "" { + return nil + } var host, path string host, path, _, _ = url.GetURLParts(e.Url) - if err := conn.bulks.Get("webClickEvents").Append(sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.Label, e.Selector, host+path, path); err != nil { + if err := conn.bulks.Get("webClickEvents").Append(sessionID, truncSqIdx(e.MsgID()), e.Timestamp, e.Label, e.Selector, host+path, path); err != nil { log.Printf("insert web click err: %s", err) } // Accumulate session updates and exec inside batch with another sql commands diff --git a/backend/pkg/db/types/error-event.go b/backend/pkg/db/types/error-event.go index bef9abd99..9f2f1a886 100644 --- a/backend/pkg/db/types/error-event.go +++ b/backend/pkg/db/types/error-event.go @@ -120,3 +120,15 @@ func (e *ErrorEvent) ID(projectID uint32) string { } return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil)) } + +func WrapCustomEvent(m *CustomEvent) *IssueEvent { + msg := &IssueEvent{ + Type: "custom", + Timestamp: m.Time(), + MessageID: m.MsgID(), + ContextString: m.Name, + Payload: m.Payload, + } + msg.Meta().SetMeta(m.Meta()) + return msg +} diff --git a/backend/pkg/handlers/custom/eventMapper.go b/backend/pkg/handlers/custom/eventMapper.go deleted file mode 100644 index a85ebbdf0..000000000 --- a/backend/pkg/handlers/custom/eventMapper.go +++ /dev/null @@ -1,82 +0,0 @@ -package 
custom - -import ( - "net/url" - "strings" - - . "openreplay/backend/pkg/messages" -) - -func getURLExtention(URL string) string { - u, err := url.Parse(URL) - if err != nil { - return "" - } - i := strings.LastIndex(u.Path, ".") - return u.Path[i+1:] -} - -func getResourceType(initiator string, URL string) string { - switch initiator { - case "xmlhttprequest", "fetch": - return "fetch" - case "img": - return "img" - default: - switch getURLExtention(URL) { - case "css": - return "stylesheet" - case "js": - return "script" - case "png", "gif", "jpg", "jpeg", "svg": - return "img" - case "mp4", "mkv", "ogg", "webm", "avi", "mp3": - return "media" - default: - return "other" - } - } -} - -type EventMapper struct{} - -func (b *EventMapper) Build() Message { - return nil -} - -func (b *EventMapper) Handle(message Message, messageID uint64, timestamp uint64) Message { - switch msg := message.(type) { - case *MouseClick: - if msg.Label != "" { - return &ClickEvent{ - MessageID: messageID, - Label: msg.Label, - HesitationTime: msg.HesitationTime, - Timestamp: timestamp, - Selector: msg.Selector, - } - } - case *ResourceTiming: - return &ResourceEvent{ - MessageID: messageID, - Timestamp: msg.Timestamp, - Duration: msg.Duration, - TTFB: msg.TTFB, - HeaderSize: msg.HeaderSize, - EncodedBodySize: msg.EncodedBodySize, - DecodedBodySize: msg.DecodedBodySize, - URL: msg.URL, - Type: getResourceType(msg.Initiator, msg.URL), - Success: msg.Duration != 0, - } - case *CustomIssue: - return &IssueEvent{ - Type: "custom", - Timestamp: timestamp, - MessageID: messageID, - ContextString: msg.Name, - Payload: msg.Payload, - } - } - return nil -} diff --git a/backend/pkg/handlers/custom/inputEventBuilder.go b/backend/pkg/handlers/custom/inputEventBuilder.go index e07470f37..d057db3e3 100644 --- a/backend/pkg/handlers/custom/inputEventBuilder.go +++ b/backend/pkg/handlers/custom/inputEventBuilder.go @@ -4,7 +4,7 @@ import ( . "openreplay/backend/pkg/messages" ) -const INPUT_EVENT_TIMEOUT = 1 * 60 * 1000 +const InputEventTimeout = 1 * 60 * 1000 type inputLabels map[uint64]string @@ -24,7 +24,7 @@ func (b *inputEventBuilder) clearLabels() { b.inputLabels = make(inputLabels) } -func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (b *inputEventBuilder) Handle(message Message, timestamp uint64) Message { var inputEvent Message = nil switch msg := message.(type) { case *SetInputTarget: @@ -41,7 +41,7 @@ func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp } if b.inputEvent == nil { b.inputEvent = &InputEvent{ - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, Value: msg.Value, ValueMasked: msg.Mask > 0, @@ -59,7 +59,7 @@ func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp return b.Build() } - if b.inputEvent != nil && b.inputEvent.Timestamp+INPUT_EVENT_TIMEOUT < timestamp { + if b.inputEvent != nil && b.inputEvent.Timestamp+InputEventTimeout < timestamp { return b.Build() } return nil diff --git a/backend/pkg/handlers/custom/pageEventBuilder.go b/backend/pkg/handlers/custom/pageEventBuilder.go index d95768983..5bab7d4cc 100644 --- a/backend/pkg/handlers/custom/pageEventBuilder.go +++ b/backend/pkg/handlers/custom/pageEventBuilder.go @@ -4,7 +4,7 @@ import ( . 
"openreplay/backend/pkg/messages" ) -const PAGE_EVENT_TIMEOUT = 1 * 60 * 1000 +const PageEventTimeout = 1 * 60 * 1000 type pageEventBuilder struct { pageEvent *PageEvent @@ -16,7 +16,7 @@ func NewPageEventBuilder() *pageEventBuilder { return ieBuilder } -func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (b *pageEventBuilder) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *SetPageLocation: if msg.NavigationStart == 0 { // routing without new page loading @@ -24,7 +24,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u URL: msg.URL, Referrer: msg.Referrer, Loaded: false, - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, } } else { @@ -33,7 +33,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u URL: msg.URL, Referrer: msg.Referrer, Loaded: true, - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, } return pageEvent @@ -81,7 +81,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u } - if b.pageEvent != nil && b.pageEvent.Timestamp+PAGE_EVENT_TIMEOUT < timestamp { + if b.pageEvent != nil && b.pageEvent.Timestamp+PageEventTimeout < timestamp { return b.Build() } return nil diff --git a/backend/pkg/handlers/ios/clickRage.go b/backend/pkg/handlers/ios/clickRage.go index 84c130dae..91283de90 100644 --- a/backend/pkg/handlers/ios/clickRage.go +++ b/backend/pkg/handlers/ios/clickRage.go @@ -48,7 +48,7 @@ func (h *ClickRageDetector) Handle(message Message, messageID uint64, timestamp } func (h *ClickRageDetector) Build() Message { - if h.countsInARow >= web.MIN_CLICKS_IN_A_ROW { + if h.countsInARow >= web.MinClicksInARow { event := &IOSIssueEvent{ Type: "click_rage", ContextString: h.lastLabel, diff --git a/backend/pkg/handlers/messageProcessor.go b/backend/pkg/handlers/messageProcessor.go index c4235c18b..38d02ab1a 100644 --- a/backend/pkg/handlers/messageProcessor.go +++ b/backend/pkg/handlers/messageProcessor.go @@ -6,6 +6,6 @@ import . "openreplay/backend/pkg/messages" // U can create your own message handler and easily connect to heuristics service type MessageProcessor interface { - Handle(message Message, messageID uint64, timestamp uint64) Message + Handle(message Message, timestamp uint64) Message Build() Message } diff --git a/backend/pkg/handlers/web/clickRage.go b/backend/pkg/handlers/web/clickRage.go index 6974ee1b0..56692765e 100644 --- a/backend/pkg/handlers/web/clickRage.go +++ b/backend/pkg/handlers/web/clickRage.go @@ -7,14 +7,8 @@ import ( . 
"openreplay/backend/pkg/messages" ) -/* - Handler name: ClickRage - Input event: MouseClick - Output event: IssueEvent -*/ - -const MAX_TIME_DIFF = 300 -const MIN_CLICKS_IN_A_ROW = 3 +const MaxTimeDiff = 300 +const MinClicksInARow = 3 type ClickRageDetector struct { lastTimestamp uint64 @@ -34,46 +28,54 @@ func (crd *ClickRageDetector) reset() { crd.url = "" } -func (crd *ClickRageDetector) Build() Message { - defer crd.reset() - if crd.countsInARow >= MIN_CLICKS_IN_A_ROW { - payload, err := json.Marshal(struct{ Count int }{crd.countsInARow}) - if err != nil { - log.Printf("can't marshal ClickRage payload to json: %s", err) - } - event := &IssueEvent{ - Type: "click_rage", - ContextString: crd.lastLabel, - Payload: string(payload), - Timestamp: crd.firstInARawTimestamp, - MessageID: crd.firstInARawMessageId, - URL: crd.url, - } - return event +func (crd *ClickRageDetector) createPayload() string { + p, err := json.Marshal(struct{ Count int }{crd.countsInARow}) + if err != nil { + log.Printf("can't marshal ClickRage payload to json: %s", err) + return "" } - return nil + return string(p) } -func (crd *ClickRageDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (crd *ClickRageDetector) Build() Message { + defer crd.reset() + if crd.countsInARow < MinClicksInARow { + return nil + } + return &IssueEvent{ + Type: "click_rage", + ContextString: crd.lastLabel, + Payload: crd.createPayload(), + Timestamp: crd.firstInARawTimestamp, + MessageID: crd.firstInARawMessageId, + URL: crd.url, + } +} + +func (crd *ClickRageDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *MouseClick: + // Set click url if crd.url == "" && msg.Url != "" { crd.url = msg.Url } - // TODO: check if we it is ok to capture clickRage event without the connected ClickEvent in db. 
+ // Click on different object -> build if we can and reset the builder if msg.Label == "" { return crd.Build() } - if crd.lastLabel == msg.Label && timestamp-crd.lastTimestamp < MAX_TIME_DIFF { + // Update builder with last information + if crd.lastLabel == msg.Label && timestamp-crd.lastTimestamp < MaxTimeDiff { crd.lastTimestamp = timestamp crd.countsInARow += 1 return nil } + // Try to build event event := crd.Build() + // Use current message as init values for new event crd.lastTimestamp = timestamp crd.lastLabel = msg.Label crd.firstInARawTimestamp = timestamp - crd.firstInARawMessageId = messageID + crd.firstInARawMessageId = message.MsgID() crd.countsInARow = 1 if crd.url == "" && msg.Url != "" { crd.url = msg.Url diff --git a/backend/pkg/handlers/web/cpuIssue.go b/backend/pkg/handlers/web/cpuIssue.go index 56f483e8b..74117fc1f 100644 --- a/backend/pkg/handlers/web/cpuIssue.go +++ b/backend/pkg/handlers/web/cpuIssue.go @@ -15,8 +15,8 @@ import ( Output event: IssueEvent */ -const CPU_THRESHOLD = 70 // % out of 100 -const CPU_MIN_DURATION_TRIGGER = 6 * 1000 +const CpuThreshold = 70 // % out of 100 +const CpuMinDurationTrigger = 6 * 1000 type CpuIssueDetector struct { startTimestamp uint64 @@ -26,65 +26,61 @@ type CpuIssueDetector struct { contextString string } -func (f *CpuIssueDetector) Build() Message { - if f.startTimestamp == 0 { - return nil - } - duration := f.lastTimestamp - f.startTimestamp - timestamp := f.startTimestamp - messageID := f.startMessageID - maxRate := f.maxRate - - f.startTimestamp = 0 - f.startMessageID = 0 - f.maxRate = 0 - if duration < CPU_MIN_DURATION_TRIGGER { - return nil - } - - payload, err := json.Marshal(struct { +func (f *CpuIssueDetector) createPayload() string { + p, err := json.Marshal(struct { Duration uint64 Rate uint64 - }{duration, maxRate}) + }{f.duration(), f.maxRate}) if err != nil { log.Printf("can't marshal CpuIssue payload to json: %s", err) } + return string(p) +} +func (f *CpuIssueDetector) duration() uint64 { + return f.lastTimestamp - f.startTimestamp +} + +func (f *CpuIssueDetector) reset() { + f.startTimestamp = 0 + f.startMessageID = 0 + f.maxRate = 0 +} + +func (f *CpuIssueDetector) Build() Message { + defer f.reset() + if f.startTimestamp == 0 || f.duration() < CpuMinDurationTrigger { + return nil + } return &IssueEvent{ Type: "cpu", - Timestamp: timestamp, - MessageID: messageID, + Timestamp: f.startTimestamp, + MessageID: f.startMessageID, ContextString: f.contextString, - Payload: string(payload), + Payload: f.createPayload(), } } -func (f *CpuIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *CpuIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: - dt := performance.TimeDiff(timestamp, f.lastTimestamp) - if dt == 0 { - return nil // TODO: handle error + // Ignore out-of-order messages + if timestamp < f.lastTimestamp { + return nil } - f.lastTimestamp = timestamp - - if msg.Frames == -1 || msg.Ticks == -1 { + cpuRate := performance.CPURate(msg.Ticks, performance.TimeDiff(timestamp, f.lastTimestamp)) + // Build the event once the CPU issue has gone + if msg.Frames == -1 || msg.Ticks == -1 || cpuRate < CpuThreshold { return f.Build() } - - cpuRate := performance.CPURate(msg.Ticks, dt) - - if cpuRate >= CPU_THRESHOLD { - if f.startTimestamp == 0 { - f.startTimestamp = timestamp - f.startMessageID = messageID - } - if f.maxRate < cpuRate { - f.maxRate = cpuRate - } - } else { - return f.Build() + // Update values + 
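// Illustrative sketch (not part of the patch): how the two CPU constants
// above interact. An issue is emitted only when the CPU rate stayed at or
// above CpuThreshold (70%) for at least CpuMinDurationTrigger (6 seconds);
// the sample values and loop below are made up for demonstration.
package main

import "fmt"

const (
	cpuThreshold          = 70
	cpuMinDurationTrigger = 6 * 1000 // ms
)

type sample struct{ ts, rate uint64 }

func main() {
	var start uint64
	for _, s := range []sample{{1000, 80}, {4000, 90}, {7000, 75}, {9000, 10}} {
		if s.rate >= cpuThreshold {
			if start == 0 {
				start = s.ts // first overloaded sample opens the window
			}
			continue
		}
		// The rate dropped: emit only if the window was long enough.
		if start != 0 && s.ts-start >= cpuMinDurationTrigger {
			fmt.Printf("cpu issue: %dms over threshold\n", s.ts-start)
		}
		start = 0
	}
}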
if f.startTimestamp == 0 { + f.startTimestamp = timestamp + f.startMessageID = message.MsgID() + } + if f.maxRate < cpuRate { + f.maxRate = cpuRate } case *SetPageLocation: f.contextString = msg.URL diff --git a/backend/pkg/handlers/web/deadClick.go b/backend/pkg/handlers/web/deadClick.go index 434e6c1ce..5b02dc498 100644 --- a/backend/pkg/handlers/web/deadClick.go +++ b/backend/pkg/handlers/web/deadClick.go @@ -4,43 +4,39 @@ import ( . "openreplay/backend/pkg/messages" ) -/* - Handler name: DeadClick - Input events: SetInputTarget, - CreateDocument, - MouseClick, - SetNodeAttribute, - RemoveNodeAttribute, - CreateElementNode, - CreateTextNode, - MoveNode, - RemoveNode, - SetCSSData, - CSSInsertRule, - CSSDeleteRule - Output event: IssueEvent -*/ - -const CLICK_RELATION_TIME = 1234 +const ClickRelationTime = 1234 type DeadClickDetector struct { - lastTimestamp uint64 lastMouseClick *MouseClick + lastTimestamp uint64 lastClickTimestamp uint64 lastMessageID uint64 inputIDSet map[uint64]bool } +func NewDeadClickDetector() *DeadClickDetector { + return &DeadClickDetector{inputIDSet: make(map[uint64]bool)} +} + +func (d *DeadClickDetector) addInputID(id uint64) { + d.inputIDSet[id] = true +} + +func (d *DeadClickDetector) clearInputIDs() { + d.inputIDSet = make(map[uint64]bool) +} + func (d *DeadClickDetector) reset() { - d.inputIDSet = nil d.lastMouseClick = nil d.lastClickTimestamp = 0 d.lastMessageID = 0 + d.clearInputIDs() } -func (d *DeadClickDetector) build(timestamp uint64) Message { +func (d *DeadClickDetector) Build() Message { + // remove reset from external Build call defer d.reset() - if d.lastMouseClick == nil || d.lastClickTimestamp+CLICK_RELATION_TIME > timestamp { // reaction is instant + if d.lastMouseClick == nil || d.lastClickTimestamp+ClickRelationTime > d.lastTimestamp { // reaction is instant return nil } event := &IssueEvent{ @@ -52,42 +48,37 @@ func (d *DeadClickDetector) build(timestamp uint64) Message { return event } -func (d *DeadClickDetector) Build() Message { - return d.build(d.lastTimestamp) -} - -func (d *DeadClickDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (d *DeadClickDetector) Handle(message Message, timestamp uint64) Message { d.lastTimestamp = timestamp switch msg := message.(type) { case *SetInputTarget: - if d.inputIDSet == nil { - d.inputIDSet = make(map[uint64]bool) - } - d.inputIDSet[msg.ID] = true + d.addInputID(msg.ID) case *CreateDocument: - d.inputIDSet = nil + d.clearInputIDs() case *MouseClick: if msg.Label == "" { return nil } - event := d.build(timestamp) - if d.inputIDSet[msg.ID] { // ignore if input + isInputEvent := d.inputIDSet[msg.ID] + event := d.Build() + if isInputEvent { return event } d.lastMouseClick = msg d.lastClickTimestamp = timestamp - d.lastMessageID = messageID + d.lastMessageID = message.MsgID() return event case *SetNodeAttribute, *RemoveNodeAttribute, *CreateElementNode, *CreateTextNode, + *SetNodeFocus, *MoveNode, *RemoveNode, *SetCSSData, *CSSInsertRule, *CSSDeleteRule: - return d.build(timestamp) + return d.Build() } return nil } diff --git a/backend/pkg/handlers/web/domDrop.go b/backend/pkg/handlers/web/domDrop.go deleted file mode 100644 index 4a3ec2065..000000000 --- a/backend/pkg/handlers/web/domDrop.go +++ /dev/null @@ -1,55 +0,0 @@ -package web - -import ( - . 
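// Illustrative sketch (not part of the patch): the dead-click rule above in
// isolation. A labeled click counts as dead when no DOM reaction arrives
// within ClickRelationTime (1234ms); anything faster is treated as instant.
package main

import "fmt"

const clickRelationTime = 1234 // ms

func isDeadClick(clickTs, firstReactionTs uint64) bool {
	// A reaction inside the window means the click did something.
	return firstReactionTs-clickTs >= clickRelationTime
}

func main() {
	fmt.Println(isDeadClick(1000, 1200)) // false: the DOM reacted in 200ms
	fmt.Println(isDeadClick(1000, 3000)) // true: no reaction for 2s
}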
"openreplay/backend/pkg/messages" -) - -/* - Handler name: DomDrop - Input events: CreateElementNode, - CreateTextNode, - RemoveNode - Output event: DOMDrop -*/ - -const DROP_WINDOW = 200 //ms -const CRITICAL_COUNT = 1 // Our login page contains 20. But on crush it removes only roots (1-3 nodes). -// TODO: smart detection (making whole DOM tree would eat all memory) - -type domDropDetector struct { - removedCount int - lastDropTimestamp uint64 -} - -func (dd *domDropDetector) reset() { - dd.removedCount = 0 - dd.lastDropTimestamp = 0 -} - -func (dd *domDropDetector) Handle(message Message, _ uint64, timestamp uint64) Message { - switch message.(type) { - case *CreateElementNode, - *CreateTextNode: - dd.removedCount = 0 - dd.lastDropTimestamp = 0 - case *RemoveNode: - if dd.lastDropTimestamp+DROP_WINDOW > timestamp { - dd.removedCount += 1 - } else { - dd.removedCount = 1 - } - dd.lastDropTimestamp = timestamp - } - return nil -} - -func (dd *domDropDetector) Build() Message { - defer dd.reset() - if dd.removedCount >= CRITICAL_COUNT { - domDrop := &DOMDrop{ - Timestamp: dd.lastDropTimestamp, - } - return domDrop - } - return nil -} diff --git a/backend/pkg/handlers/web/memoryIssue.go b/backend/pkg/handlers/web/memoryIssue.go index 487c396a9..4b3022d74 100644 --- a/backend/pkg/handlers/web/memoryIssue.go +++ b/backend/pkg/handlers/web/memoryIssue.go @@ -8,13 +8,6 @@ import ( . "openreplay/backend/pkg/messages" ) -/* - Handler name: MemoryIssue - Input events: PerformanceTrack, - SetPageLocation - Output event: IssueEvent -*/ - const MIN_COUNT = 3 const MEM_RATE_THRESHOLD = 300 // % to average @@ -52,7 +45,7 @@ func (f *MemoryIssueDetector) Build() Message { return event } -func (f *MemoryIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *MemoryIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: if f.count < MIN_COUNT { @@ -70,7 +63,7 @@ func (f *MemoryIssueDetector) Handle(message Message, messageID uint64, timestam if rate >= MEM_RATE_THRESHOLD { if f.startTimestamp == 0 { f.startTimestamp = timestamp - f.startMessageID = messageID + f.startMessageID = message.MsgID() } if f.rate < rate { f.rate = rate diff --git a/backend/pkg/handlers/web/networkIssue.go b/backend/pkg/handlers/web/networkIssue.go index 20ef412dd..9dd1f7a04 100644 --- a/backend/pkg/handlers/web/networkIssue.go +++ b/backend/pkg/handlers/web/networkIssue.go @@ -4,26 +4,19 @@ import ( . 
"openreplay/backend/pkg/messages" ) -/* - Handler name: NetworkIssue - Input events: ResourceTiming, - NetworkRequest - Output event: IssueEvent -*/ - type NetworkIssueDetector struct{} func (f *NetworkIssueDetector) Build() Message { return nil } -func (f *NetworkIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *NetworkIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *NetworkRequest: if msg.Status >= 400 { return &IssueEvent{ Type: "bad_request", - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: msg.Timestamp, ContextString: msg.URL, } diff --git a/backend/pkg/handlers/web/performanceAggregator.go b/backend/pkg/handlers/web/performanceAggregator.go index ba23978b2..babe136a5 100644 --- a/backend/pkg/handlers/web/performanceAggregator.go +++ b/backend/pkg/handlers/web/performanceAggregator.go @@ -7,13 +7,7 @@ import ( "openreplay/backend/pkg/messages/performance" ) -/* - Handler name: PerformanceAggregator - Input event: PerformanceTrack - Output event: PerformanceTrackAggr -*/ - -const AGGREGATION_WINDOW = 2 * 60 * 1000 +const AggregationWindow = 2 * 60 * 1000 type PerformanceAggregator struct { *PerformanceTrackAggr @@ -42,7 +36,7 @@ func (b *PerformanceAggregator) reset() { b.lastTimestamp = 0 } -func (b *PerformanceAggregator) Handle(message Message, _ uint64, timestamp uint64) Message { +func (b *PerformanceAggregator) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: if b.PerformanceTrackAggr == nil || msg.Frames == -1 || msg.Ticks == -1 { @@ -93,7 +87,7 @@ func (b *PerformanceAggregator) Handle(message Message, _ uint64, timestamp uint b.lastTimestamp = timestamp } if b.PerformanceTrackAggr != nil && - timestamp-b.PerformanceTrackAggr.TimestampStart >= AGGREGATION_WINDOW { + timestamp-b.PerformanceTrackAggr.TimestampStart >= AggregationWindow { return b.Build() } return nil diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index 30e266194..300e38883 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -2,7 +2,7 @@ package messages func IsReplayerType(id int) bool { - return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 35 != id && 42 != id && 52 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 125 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id + return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 42 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 125 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id } func IsIOSType(id int) bool { @@ -11,4 +11,4 @@ func IsIOSType(id int) bool { func IsDOMType(id int) bool { return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 37 == id || 38 == id || 49 
== id || 50 == id || 51 == id || 54 == id || 55 == id || 57 == id || 58 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id -} +} \ No newline at end of file diff --git a/backend/pkg/messages/iterator-ender.go b/backend/pkg/messages/iterator-ender.go index 6ca7db034..1d9063749 100644 --- a/backend/pkg/messages/iterator-ender.go +++ b/backend/pkg/messages/iterator-ender.go @@ -126,7 +126,7 @@ func (i *enderMessageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMetadata") } @@ -139,7 +139,7 @@ func (i *enderMessageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMeta") } @@ -149,13 +149,13 @@ func (i *enderMessageIteratorImpl) preprocessing(msg Message) error { } case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("Timestamp") } case *SessionStart: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionStart") log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", @@ -163,7 +163,7 @@ func (i *enderMessageIteratorImpl) preprocessing(msg Message) error { } case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionEnd") } diff --git a/backend/pkg/messages/iterator-sink.go b/backend/pkg/messages/iterator-sink.go index be12b63eb..15ab5c077 100644 --- a/backend/pkg/messages/iterator-sink.go +++ b/backend/pkg/messages/iterator-sink.go @@ -128,7 +128,7 @@ func (i *sinkMessageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMetadata") } @@ -141,7 +141,7 @@ func (i *sinkMessageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMeta") } @@ -151,13 +151,13 @@ func (i *sinkMessageIteratorImpl) preprocessing(msg Message) error { } case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("Timestamp") } case *SessionStart: - 
i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionStart") log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", @@ -165,7 +165,7 @@ func (i *sinkMessageIteratorImpl) preprocessing(msg Message) error { } case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionEnd") } diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go index f7b014d30..4a39a7fce 100644 --- a/backend/pkg/messages/iterator.go +++ b/backend/pkg/messages/iterator.go @@ -108,11 +108,20 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { // Set meta information for message msg.Meta().SetMeta(i.messageInfo) + // Update timestamp value for iOS message types + if IsIOSType(msgType) { + msg.Meta().Timestamp = i.getIOSTimestamp(msg) + } + // Process message i.handler(msg) } } +func (i *messageIteratorImpl) getIOSTimestamp(msg Message) uint64 { + return GetTimestamp(msg) +} + func (i *messageIteratorImpl) zeroTsLog(msgType string) { log.Printf("zero timestamp in %s, info: %s", msgType, i.batchInfo.Info()) } @@ -127,7 +136,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMetadata") } @@ -140,7 +149,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMeta") } @@ -150,13 +159,13 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { } case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("Timestamp") } case *SessionStart: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionStart") log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", @@ -164,7 +173,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { } case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionEnd") } diff --git a/backend/pkg/messages/message.go b/backend/pkg/messages/message.go index 3a8e029d5..6ae02e6c5 100644 --- a/backend/pkg/messages/message.go +++ b/backend/pkg/messages/message.go @@ -8,6 +8,8 @@ type Message interface { TypeID() int Meta() *message SessionID() uint64 + MsgID() uint64 + Time() uint64 } // BatchInfo represents common information for all messages inside data batch @@ -47,7 +49,7 @@ func (b *BatchInfo) Info() string { } type message struct { - Timestamp int64 + Timestamp uint64 Index uint64 Url string batch *BatchInfo @@ -72,6 +74,14 @@ func (m *message) SessionID() uint64 { return m.batch.sessionID } +func (m *message) MsgID() uint64 { + return m.Meta().Index +} + +func (m *message) Time() uint64 { + return 
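// Illustrative sketch (not part of the patch): with message.Timestamp now
// uint64, matching the wire format, the iterators above assign Timestamp and
// SessionStart/SessionEnd values directly instead of casting. Only the
// BatchMeta/BatchMetadata structs keep int64 timestamps, hence the remaining
// uint64(m.Timestamp) conversions. In miniature:
package main

import "fmt"

type meta struct{ Timestamp uint64 }

func main() {
	var m meta
	var sessionStartTs uint64 = 1700000000000
	var batchMetaTs int64 = 1700000000001
	m.Timestamp = sessionStartTs      // no cast needed any more
	m.Timestamp = uint64(batchMetaTs) // batch meta still converts
	fmt.Println(m.Timestamp)
}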
m.Meta().Timestamp +} + func (m *message) SetSessionID(sessID uint64) { if m.batch == nil { m.batch = &BatchInfo{} diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index a96f98de8..52fbb7a64 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -2,107 +2,105 @@ package messages const ( - MsgTimestamp = 0 - MsgSessionStart = 1 - MsgSessionEndDeprecated = 3 - MsgSetPageLocation = 4 - MsgSetViewportSize = 5 - MsgSetViewportScroll = 6 - MsgCreateDocument = 7 - MsgCreateElementNode = 8 - MsgCreateTextNode = 9 - MsgMoveNode = 10 - MsgRemoveNode = 11 - MsgSetNodeAttribute = 12 - MsgRemoveNodeAttribute = 13 - MsgSetNodeData = 14 - MsgSetCSSData = 15 - MsgSetNodeScroll = 16 - MsgSetInputTarget = 17 - MsgSetInputValue = 18 - MsgSetInputChecked = 19 - MsgMouseMove = 20 - MsgNetworkRequest = 21 - MsgConsoleLog = 22 - MsgPageLoadTiming = 23 - MsgPageRenderTiming = 24 - MsgJSExceptionDeprecated = 25 - MsgIntegrationEvent = 26 - MsgCustomEvent = 27 - MsgUserID = 28 - MsgUserAnonymousID = 29 - MsgMetadata = 30 - MsgPageEvent = 31 - MsgInputEvent = 32 - MsgClickEvent = 33 - MsgResourceEvent = 35 - MsgCSSInsertRule = 37 - MsgCSSDeleteRule = 38 - MsgFetch = 39 - MsgProfiler = 40 - MsgOTable = 41 - MsgStateAction = 42 - MsgRedux = 44 - MsgVuex = 45 - MsgMobX = 46 - MsgNgRx = 47 - MsgGraphQL = 48 - MsgPerformanceTrack = 49 - MsgStringDict = 50 - MsgSetNodeAttributeDict = 51 - MsgDOMDrop = 52 - MsgResourceTiming = 53 - MsgConnectionInformation = 54 - MsgSetPageVisibility = 55 - MsgPerformanceTrackAggr = 56 - MsgLoadFontFace = 57 - MsgSetNodeFocus = 58 - MsgLongTask = 59 - MsgSetNodeAttributeURLBased = 60 - MsgSetCSSDataURLBased = 61 - MsgIssueEventDeprecated = 62 - MsgTechnicalInfo = 63 - MsgCustomIssue = 64 - MsgAssetCache = 66 - MsgCSSInsertRuleURLBased = 67 - MsgMouseClick = 69 - MsgCreateIFrameDocument = 70 - MsgAdoptedSSReplaceURLBased = 71 - MsgAdoptedSSReplace = 72 - MsgAdoptedSSInsertRuleURLBased = 73 - MsgAdoptedSSInsertRule = 74 - MsgAdoptedSSDeleteRule = 75 - MsgAdoptedSSAddOwner = 76 - MsgAdoptedSSRemoveOwner = 77 - MsgJSException = 78 - MsgZustand = 79 - MsgBatchMeta = 80 - MsgBatchMetadata = 81 - MsgPartitionedMessage = 82 - MsgIssueEvent = 125 - MsgSessionEnd = 126 - MsgSessionSearch = 127 - MsgIOSBatchMeta = 107 - MsgIOSSessionStart = 90 - MsgIOSSessionEnd = 91 - MsgIOSMetadata = 92 - MsgIOSCustomEvent = 93 - MsgIOSUserID = 94 - MsgIOSUserAnonymousID = 95 - MsgIOSScreenChanges = 96 - MsgIOSCrash = 97 - MsgIOSScreenEnter = 98 - MsgIOSScreenLeave = 99 - MsgIOSClickEvent = 100 - MsgIOSInputEvent = 101 - MsgIOSPerformanceEvent = 102 - MsgIOSLog = 103 - MsgIOSInternalError = 104 - MsgIOSNetworkCall = 105 - MsgIOSPerformanceAggregated = 110 - MsgIOSIssueEvent = 111 + MsgTimestamp = 0 + MsgSessionStart = 1 + MsgSessionEndDeprecated = 3 + MsgSetPageLocation = 4 + MsgSetViewportSize = 5 + MsgSetViewportScroll = 6 + MsgCreateDocument = 7 + MsgCreateElementNode = 8 + MsgCreateTextNode = 9 + MsgMoveNode = 10 + MsgRemoveNode = 11 + MsgSetNodeAttribute = 12 + MsgRemoveNodeAttribute = 13 + MsgSetNodeData = 14 + MsgSetCSSData = 15 + MsgSetNodeScroll = 16 + MsgSetInputTarget = 17 + MsgSetInputValue = 18 + MsgSetInputChecked = 19 + MsgMouseMove = 20 + MsgNetworkRequest = 21 + MsgConsoleLog = 22 + MsgPageLoadTiming = 23 + MsgPageRenderTiming = 24 + MsgJSExceptionDeprecated = 25 + MsgIntegrationEvent = 26 + MsgCustomEvent = 27 + MsgUserID = 28 + MsgUserAnonymousID = 29 + MsgMetadata = 30 + MsgPageEvent = 31 + MsgInputEvent = 32 + MsgCSSInsertRule = 
37 + MsgCSSDeleteRule = 38 + MsgFetch = 39 + MsgProfiler = 40 + MsgOTable = 41 + MsgStateAction = 42 + MsgRedux = 44 + MsgVuex = 45 + MsgMobX = 46 + MsgNgRx = 47 + MsgGraphQL = 48 + MsgPerformanceTrack = 49 + MsgStringDict = 50 + MsgSetNodeAttributeDict = 51 + MsgResourceTiming = 53 + MsgConnectionInformation = 54 + MsgSetPageVisibility = 55 + MsgPerformanceTrackAggr = 56 + MsgLoadFontFace = 57 + MsgSetNodeFocus = 58 + MsgLongTask = 59 + MsgSetNodeAttributeURLBased = 60 + MsgSetCSSDataURLBased = 61 + MsgIssueEventDeprecated = 62 + MsgTechnicalInfo = 63 + MsgCustomIssue = 64 + MsgAssetCache = 66 + MsgCSSInsertRuleURLBased = 67 + MsgMouseClick = 69 + MsgCreateIFrameDocument = 70 + MsgAdoptedSSReplaceURLBased = 71 + MsgAdoptedSSReplace = 72 + MsgAdoptedSSInsertRuleURLBased = 73 + MsgAdoptedSSInsertRule = 74 + MsgAdoptedSSDeleteRule = 75 + MsgAdoptedSSAddOwner = 76 + MsgAdoptedSSRemoveOwner = 77 + MsgJSException = 78 + MsgZustand = 79 + MsgBatchMeta = 80 + MsgBatchMetadata = 81 + MsgPartitionedMessage = 82 + MsgIssueEvent = 125 + MsgSessionEnd = 126 + MsgSessionSearch = 127 + MsgIOSBatchMeta = 107 + MsgIOSSessionStart = 90 + MsgIOSSessionEnd = 91 + MsgIOSMetadata = 92 + MsgIOSCustomEvent = 93 + MsgIOSUserID = 94 + MsgIOSUserAnonymousID = 95 + MsgIOSScreenChanges = 96 + MsgIOSCrash = 97 + MsgIOSScreenEnter = 98 + MsgIOSScreenLeave = 99 + MsgIOSClickEvent = 100 + MsgIOSInputEvent = 101 + MsgIOSPerformanceEvent = 102 + MsgIOSLog = 103 + MsgIOSInternalError = 104 + MsgIOSNetworkCall = 105 + MsgIOSPerformanceAggregated = 110 + MsgIOSIssueEvent = 111 ) + type Timestamp struct { message Timestamp uint64 @@ -126,22 +124,22 @@ func (msg *Timestamp) TypeID() int { type SessionStart struct { message - Timestamp uint64 - ProjectID uint64 - TrackerVersion string - RevID string - UserUUID string - UserAgent string - UserOS string - UserOSVersion string - UserBrowser string - UserBrowserVersion string - UserDevice string - UserDeviceType string + Timestamp uint64 + ProjectID uint64 + TrackerVersion string + RevID string + UserUUID string + UserAgent string + UserOS string + UserOSVersion string + UserBrowser string + UserBrowserVersion string + UserDevice string + UserDeviceType string UserDeviceMemorySize uint64 - UserDeviceHeapSize uint64 - UserCountry string - UserID string + UserDeviceHeapSize uint64 + UserCountry string + UserID string } func (msg *SessionStart) Encode() []byte { @@ -198,8 +196,8 @@ func (msg *SessionEndDeprecated) TypeID() int { type SetPageLocation struct { message - URL string - Referrer string + URL string + Referrer string NavigationStart uint64 } @@ -223,7 +221,7 @@ func (msg *SetPageLocation) TypeID() int { type SetViewportSize struct { message - Width uint64 + Width uint64 Height uint64 } @@ -269,6 +267,7 @@ func (msg *SetViewportScroll) TypeID() int { type CreateDocument struct { message + } func (msg *CreateDocument) Encode() []byte { @@ -289,11 +288,11 @@ func (msg *CreateDocument) TypeID() int { type CreateElementNode struct { message - ID uint64 + ID uint64 ParentID uint64 - index uint64 - Tag string - SVG bool + index uint64 + Tag string + SVG bool } func (msg *CreateElementNode) Encode() []byte { @@ -318,9 +317,9 @@ func (msg *CreateElementNode) TypeID() int { type CreateTextNode struct { message - ID uint64 + ID uint64 ParentID uint64 - Index uint64 + Index uint64 } func (msg *CreateTextNode) Encode() []byte { @@ -343,9 +342,9 @@ func (msg *CreateTextNode) TypeID() int { type MoveNode struct { message - ID uint64 + ID uint64 ParentID uint64 - Index uint64 + Index uint64 
} func (msg *MoveNode) Encode() []byte { @@ -389,8 +388,8 @@ func (msg *RemoveNode) TypeID() int { type SetNodeAttribute struct { message - ID uint64 - Name string + ID uint64 + Name string Value string } @@ -414,7 +413,7 @@ func (msg *SetNodeAttribute) TypeID() int { type RemoveNodeAttribute struct { message - ID uint64 + ID uint64 Name string } @@ -437,7 +436,7 @@ func (msg *RemoveNodeAttribute) TypeID() int { type SetNodeData struct { message - ID uint64 + ID uint64 Data string } @@ -460,7 +459,7 @@ func (msg *SetNodeData) TypeID() int { type SetCSSData struct { message - ID uint64 + ID uint64 Data string } @@ -484,8 +483,8 @@ func (msg *SetCSSData) TypeID() int { type SetNodeScroll struct { message ID uint64 - X int64 - Y int64 + X int64 + Y int64 } func (msg *SetNodeScroll) Encode() []byte { @@ -508,7 +507,7 @@ func (msg *SetNodeScroll) TypeID() int { type SetInputTarget struct { message - ID uint64 + ID uint64 Label string } @@ -531,9 +530,9 @@ func (msg *SetInputTarget) TypeID() int { type SetInputValue struct { message - ID uint64 + ID uint64 Value string - Mask int64 + Mask int64 } func (msg *SetInputValue) Encode() []byte { @@ -556,7 +555,7 @@ func (msg *SetInputValue) TypeID() int { type SetInputChecked struct { message - ID uint64 + ID uint64 Checked bool } @@ -602,14 +601,14 @@ func (msg *MouseMove) TypeID() int { type NetworkRequest struct { message - Type string - Method string - URL string - Request string - Response string - Status uint64 + Type string + Method string + URL string + Request string + Response string + Status uint64 Timestamp uint64 - Duration uint64 + Duration uint64 } func (msg *NetworkRequest) Encode() []byte { @@ -660,15 +659,15 @@ func (msg *ConsoleLog) TypeID() int { type PageLoadTiming struct { message - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 + RequestStart uint64 + ResponseStart uint64 + ResponseEnd uint64 DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 } func (msg *PageLoadTiming) Encode() []byte { @@ -697,8 +696,8 @@ func (msg *PageLoadTiming) TypeID() int { type PageRenderTiming struct { message - SpeedIndex uint64 - VisuallyComplete uint64 + SpeedIndex uint64 + VisuallyComplete uint64 TimeToInteractive uint64 } @@ -722,7 +721,7 @@ func (msg *PageRenderTiming) TypeID() int { type JSExceptionDeprecated struct { message - Name string + Name string Message string Payload string } @@ -748,10 +747,10 @@ func (msg *JSExceptionDeprecated) TypeID() int { type IntegrationEvent struct { message Timestamp uint64 - Source string - Name string - Message string - Payload string + Source string + Name string + Message string + Payload string } func (msg *IntegrationEvent) Encode() []byte { @@ -776,7 +775,7 @@ func (msg *IntegrationEvent) TypeID() int { type CustomEvent struct { message - Name string + Name string Payload string } @@ -841,7 +840,7 @@ func (msg *UserAnonymousID) TypeID() int { type Metadata struct { message - Key string + Key string Value string } @@ -864,23 +863,23 @@ func (msg *Metadata) TypeID() int { type PageEvent struct { message - MessageID uint64 - Timestamp uint64 - URL string - Referrer string - Loaded bool - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 + MessageID uint64 + Timestamp uint64 + URL string + Referrer string + Loaded bool + RequestStart 
uint64 + ResponseStart uint64 + ResponseEnd uint64 DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 - SpeedIndex uint64 - VisuallyComplete uint64 - TimeToInteractive uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 + SpeedIndex uint64 + VisuallyComplete uint64 + TimeToInteractive uint64 } func (msg *PageEvent) Encode() []byte { @@ -917,11 +916,11 @@ func (msg *PageEvent) TypeID() int { type InputEvent struct { message - MessageID uint64 - Timestamp uint64 - Value string + MessageID uint64 + Timestamp uint64 + Value string ValueMasked bool - Label string + Label string } func (msg *InputEvent) Encode() []byte { @@ -944,82 +943,10 @@ func (msg *InputEvent) TypeID() int { return 32 } -type ClickEvent struct { - message - MessageID uint64 - Timestamp uint64 - HesitationTime uint64 - Label string - Selector string -} - -func (msg *ClickEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Label)+len(msg.Selector)) - buf[0] = 33 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.HesitationTime, buf, p) - p = WriteString(msg.Label, buf, p) - p = WriteString(msg.Selector, buf, p) - return buf[:p] -} - -func (msg *ClickEvent) Decode() Message { - return msg -} - -func (msg *ClickEvent) TypeID() int { - return 33 -} - -type ResourceEvent struct { - message - MessageID uint64 - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 - EncodedBodySize uint64 - DecodedBodySize uint64 - URL string - Type string - Success bool - Method string - Status uint64 -} - -func (msg *ResourceEvent) Encode() []byte { - buf := make([]byte, 121+len(msg.URL)+len(msg.Type)+len(msg.Method)) - buf[0] = 35 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteUint(msg.TTFB, buf, p) - p = WriteUint(msg.HeaderSize, buf, p) - p = WriteUint(msg.EncodedBodySize, buf, p) - p = WriteUint(msg.DecodedBodySize, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Type, buf, p) - p = WriteBoolean(msg.Success, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteUint(msg.Status, buf, p) - return buf[:p] -} - -func (msg *ResourceEvent) Decode() Message { - return msg -} - -func (msg *ResourceEvent) TypeID() int { - return 35 -} - type CSSInsertRule struct { message - ID uint64 - Rule string + ID uint64 + Rule string Index uint64 } @@ -1043,7 +970,7 @@ func (msg *CSSInsertRule) TypeID() int { type CSSDeleteRule struct { message - ID uint64 + ID uint64 Index uint64 } @@ -1066,13 +993,13 @@ func (msg *CSSDeleteRule) TypeID() int { type Fetch struct { message - Method string - URL string - Request string - Response string - Status uint64 + Method string + URL string + Request string + Response string + Status uint64 Timestamp uint64 - Duration uint64 + Duration uint64 } func (msg *Fetch) Encode() []byte { @@ -1099,10 +1026,10 @@ func (msg *Fetch) TypeID() int { type Profiler struct { message - Name string + Name string Duration uint64 - Args string - Result string + Args string + Result string } func (msg *Profiler) Encode() []byte { @@ -1126,7 +1053,7 @@ func (msg *Profiler) TypeID() int { type OTable struct { message - Key string + Key string Value string } @@ -1170,8 +1097,8 @@ func (msg *StateAction) TypeID() int { type Redux struct { message - Action string - State 
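// Illustrative sketch (helpers are plausible stand-ins, assumed rather than
// copied from the repo): every Encode method in this file follows the same
// shape, a type byte followed by variable-length integers and
// length-prefixed strings, with the buffer sized for the worst case. The
// removed ClickEvent/ResourceEvent encoders above used the same scheme.
package main

import "fmt"

// writeUint emits a LEB128-style varint, one assumption for WriteUint.
func writeUint(v uint64, buf []byte, p int) int {
	for v >= 0x80 {
		buf[p] = byte(v) | 0x80
		v >>= 7
		p++
	}
	buf[p] = byte(v)
	return p + 1
}

// writeString emits a varint length prefix followed by the raw bytes.
func writeString(s string, buf []byte, p int) int {
	p = writeUint(uint64(len(s)), buf, p)
	return p + copy(buf[p:], s)
}

func main() {
	label := "button#submit"
	// 10 bytes is the worst case for one varint uint64; the real code sizes
	// buffers the same way (fixed worst case plus string lengths).
	buf := make([]byte, 1+10+10+len(label))
	buf[0] = 69 // the MouseClick type byte, per TypeID above
	p := 1
	p = writeUint(123, buf, p) // e.g. an ID field
	p = writeString(label, buf, p)
	fmt.Println(buf[:p])
}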
string + Action string + State string Duration uint64 } @@ -1196,7 +1123,7 @@ func (msg *Redux) TypeID() int { type Vuex struct { message Mutation string - State string + State string } func (msg *Vuex) Encode() []byte { @@ -1218,7 +1145,7 @@ func (msg *Vuex) TypeID() int { type MobX struct { message - Type string + Type string Payload string } @@ -1241,8 +1168,8 @@ func (msg *MobX) TypeID() int { type NgRx struct { message - Action string - State string + Action string + State string Duration uint64 } @@ -1268,8 +1195,8 @@ type GraphQL struct { message OperationKind string OperationName string - Variables string - Response string + Variables string + Response string } func (msg *GraphQL) Encode() []byte { @@ -1293,10 +1220,10 @@ func (msg *GraphQL) TypeID() int { type PerformanceTrack struct { message - Frames int64 - Ticks int64 + Frames int64 + Ticks int64 TotalJSHeapSize uint64 - UsedJSHeapSize uint64 + UsedJSHeapSize uint64 } func (msg *PerformanceTrack) Encode() []byte { @@ -1320,7 +1247,7 @@ func (msg *PerformanceTrack) TypeID() int { type StringDict struct { message - Key uint64 + Key uint64 Value string } @@ -1343,8 +1270,8 @@ func (msg *StringDict) TypeID() int { type SetNodeAttributeDict struct { message - ID uint64 - NameKey uint64 + ID uint64 + NameKey uint64 ValueKey uint64 } @@ -1366,37 +1293,16 @@ func (msg *SetNodeAttributeDict) TypeID() int { return 51 } -type DOMDrop struct { - message - Timestamp uint64 -} - -func (msg *DOMDrop) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 52 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] -} - -func (msg *DOMDrop) Decode() Message { - return msg -} - -func (msg *DOMDrop) TypeID() int { - return 52 -} - type ResourceTiming struct { message - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 EncodedBodySize uint64 DecodedBodySize uint64 - URL string - Initiator string + URL string + Initiator string } func (msg *ResourceTiming) Encode() []byte { @@ -1425,7 +1331,7 @@ func (msg *ResourceTiming) TypeID() int { type ConnectionInformation struct { message Downlink uint64 - Type string + Type string } func (msg *ConnectionInformation) Encode() []byte { @@ -1468,20 +1374,20 @@ func (msg *SetPageVisibility) TypeID() int { type PerformanceTrackAggr struct { message - TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 + TimestampStart uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 MinTotalJSHeapSize uint64 AvgTotalJSHeapSize uint64 MaxTotalJSHeapSize uint64 - MinUsedJSHeapSize uint64 - AvgUsedJSHeapSize uint64 - MaxUsedJSHeapSize uint64 + MinUsedJSHeapSize uint64 + AvgUsedJSHeapSize uint64 + MaxUsedJSHeapSize uint64 } func (msg *PerformanceTrackAggr) Encode() []byte { @@ -1515,9 +1421,9 @@ func (msg *PerformanceTrackAggr) TypeID() int { type LoadFontFace struct { message - ParentID uint64 - Family string - Source string + ParentID uint64 + Family string + Source string Descriptors string } @@ -1563,12 +1469,12 @@ func (msg *SetNodeFocus) TypeID() int { type LongTask struct { message - Timestamp uint64 - Duration uint64 - Context uint64 + Timestamp uint64 + Duration uint64 + Context uint64 ContainerType uint64 - ContainerSrc string - ContainerId string + ContainerSrc string + ContainerId string ContainerName string } @@ -1596,9 +1502,9 @@ func (msg *LongTask) TypeID() 
int { type SetNodeAttributeURLBased struct { message - ID uint64 - Name string - Value string + ID uint64 + Name string + Value string BaseURL string } @@ -1623,8 +1529,8 @@ func (msg *SetNodeAttributeURLBased) TypeID() int { type SetCSSDataURLBased struct { message - ID uint64 - Data string + ID uint64 + Data string BaseURL string } @@ -1648,12 +1554,12 @@ func (msg *SetCSSDataURLBased) TypeID() int { type IssueEventDeprecated struct { message - MessageID uint64 - Timestamp uint64 - Type string + MessageID uint64 + Timestamp uint64 + Type string ContextString string - Context string - Payload string + Context string + Payload string } func (msg *IssueEventDeprecated) Encode() []byte { @@ -1679,7 +1585,7 @@ func (msg *IssueEventDeprecated) TypeID() int { type TechnicalInfo struct { message - Type string + Type string Value string } @@ -1702,7 +1608,7 @@ func (msg *TechnicalInfo) TypeID() int { type CustomIssue struct { message - Name string + Name string Payload string } @@ -1746,9 +1652,9 @@ func (msg *AssetCache) TypeID() int { type CSSInsertRuleURLBased struct { message - ID uint64 - Rule string - Index uint64 + ID uint64 + Rule string + Index uint64 BaseURL string } @@ -1773,10 +1679,10 @@ func (msg *CSSInsertRuleURLBased) TypeID() int { type MouseClick struct { message - ID uint64 + ID uint64 HesitationTime uint64 - Label string - Selector string + Label string + Selector string } func (msg *MouseClick) Encode() []byte { @@ -1801,7 +1707,7 @@ func (msg *MouseClick) TypeID() int { type CreateIFrameDocument struct { message FrameID uint64 - ID uint64 + ID uint64 } func (msg *CreateIFrameDocument) Encode() []byte { @@ -1824,7 +1730,7 @@ func (msg *CreateIFrameDocument) TypeID() int { type AdoptedSSReplaceURLBased struct { message SheetID uint64 - Text string + Text string BaseURL string } @@ -1849,7 +1755,7 @@ func (msg *AdoptedSSReplaceURLBased) TypeID() int { type AdoptedSSReplace struct { message SheetID uint64 - Text string + Text string } func (msg *AdoptedSSReplace) Encode() []byte { @@ -1872,8 +1778,8 @@ func (msg *AdoptedSSReplace) TypeID() int { type AdoptedSSInsertRuleURLBased struct { message SheetID uint64 - Rule string - Index uint64 + Rule string + Index uint64 BaseURL string } @@ -1899,8 +1805,8 @@ func (msg *AdoptedSSInsertRuleURLBased) TypeID() int { type AdoptedSSInsertRule struct { message SheetID uint64 - Rule string - Index uint64 + Rule string + Index uint64 } func (msg *AdoptedSSInsertRule) Encode() []byte { @@ -1924,7 +1830,7 @@ func (msg *AdoptedSSInsertRule) TypeID() int { type AdoptedSSDeleteRule struct { message SheetID uint64 - Index uint64 + Index uint64 } func (msg *AdoptedSSDeleteRule) Encode() []byte { @@ -1947,7 +1853,7 @@ func (msg *AdoptedSSDeleteRule) TypeID() int { type AdoptedSSAddOwner struct { message SheetID uint64 - ID uint64 + ID uint64 } func (msg *AdoptedSSAddOwner) Encode() []byte { @@ -1970,7 +1876,7 @@ func (msg *AdoptedSSAddOwner) TypeID() int { type AdoptedSSRemoveOwner struct { message SheetID uint64 - ID uint64 + ID uint64 } func (msg *AdoptedSSRemoveOwner) Encode() []byte { @@ -1992,9 +1898,9 @@ func (msg *AdoptedSSRemoveOwner) TypeID() int { type JSException struct { message - Name string - Message string - Payload string + Name string + Message string + Payload string Metadata string } @@ -2020,7 +1926,7 @@ func (msg *JSException) TypeID() int { type Zustand struct { message Mutation string - State string + State string } func (msg *Zustand) Encode() []byte { @@ -2042,9 +1948,9 @@ func (msg *Zustand) TypeID() int { type BatchMeta 
struct { message - PageNo uint64 + PageNo uint64 FirstIndex uint64 - Timestamp int64 + Timestamp int64 } func (msg *BatchMeta) Encode() []byte { @@ -2067,11 +1973,11 @@ func (msg *BatchMeta) TypeID() int { type BatchMetadata struct { message - Version uint64 - PageNo uint64 + Version uint64 + PageNo uint64 FirstIndex uint64 - Timestamp int64 - Location string + Timestamp int64 + Location string } func (msg *BatchMetadata) Encode() []byte { @@ -2096,7 +2002,7 @@ func (msg *BatchMetadata) TypeID() int { type PartitionedMessage struct { message - PartNo uint64 + PartNo uint64 PartTotal uint64 } @@ -2119,13 +2025,13 @@ func (msg *PartitionedMessage) TypeID() int { type IssueEvent struct { message - MessageID uint64 - Timestamp uint64 - Type string + MessageID uint64 + Timestamp uint64 + Type string ContextString string - Context string - Payload string - URL string + Context string + Payload string + URL string } func (msg *IssueEvent) Encode() []byte { @@ -2152,7 +2058,7 @@ func (msg *IssueEvent) TypeID() int { type SessionEnd struct { message - Timestamp uint64 + Timestamp uint64 EncryptionKey string } @@ -2198,8 +2104,8 @@ func (msg *SessionSearch) TypeID() int { type IOSBatchMeta struct { message - Timestamp uint64 - Length uint64 + Timestamp uint64 + Length uint64 FirstIndex uint64 } @@ -2223,16 +2129,16 @@ func (msg *IOSBatchMeta) TypeID() int { type IOSSessionStart struct { message - Timestamp uint64 - ProjectID uint64 + Timestamp uint64 + ProjectID uint64 TrackerVersion string - RevID string - UserUUID string - UserOS string - UserOSVersion string - UserDevice string + RevID string + UserUUID string + UserOS string + UserOSVersion string + UserDevice string UserDeviceType string - UserCountry string + UserCountry string } func (msg *IOSSessionStart) Encode() []byte { @@ -2284,9 +2190,9 @@ func (msg *IOSSessionEnd) TypeID() int { type IOSMetadata struct { message Timestamp uint64 - Length uint64 - Key string - Value string + Length uint64 + Key string + Value string } func (msg *IOSMetadata) Encode() []byte { @@ -2311,9 +2217,9 @@ func (msg *IOSMetadata) TypeID() int { type IOSCustomEvent struct { message Timestamp uint64 - Length uint64 - Name string - Payload string + Length uint64 + Name string + Payload string } func (msg *IOSCustomEvent) Encode() []byte { @@ -2338,8 +2244,8 @@ func (msg *IOSCustomEvent) TypeID() int { type IOSUserID struct { message Timestamp uint64 - Length uint64 - Value string + Length uint64 + Value string } func (msg *IOSUserID) Encode() []byte { @@ -2363,8 +2269,8 @@ func (msg *IOSUserID) TypeID() int { type IOSUserAnonymousID struct { message Timestamp uint64 - Length uint64 - Value string + Length uint64 + Value string } func (msg *IOSUserAnonymousID) Encode() []byte { @@ -2388,11 +2294,11 @@ func (msg *IOSUserAnonymousID) TypeID() int { type IOSScreenChanges struct { message Timestamp uint64 - Length uint64 - X uint64 - Y uint64 - Width uint64 - Height uint64 + Length uint64 + X uint64 + Y uint64 + Width uint64 + Height uint64 } func (msg *IOSScreenChanges) Encode() []byte { @@ -2418,10 +2324,10 @@ func (msg *IOSScreenChanges) TypeID() int { type IOSCrash struct { message - Timestamp uint64 - Length uint64 - Name string - Reason string + Timestamp uint64 + Length uint64 + Name string + Reason string Stacktrace string } @@ -2448,9 +2354,9 @@ func (msg *IOSCrash) TypeID() int { type IOSScreenEnter struct { message Timestamp uint64 - Length uint64 - Title string - ViewName string + Length uint64 + Title string + ViewName string } func (msg *IOSScreenEnter) 
Encode() []byte { @@ -2475,9 +2381,9 @@ func (msg *IOSScreenEnter) TypeID() int { type IOSScreenLeave struct { message Timestamp uint64 - Length uint64 - Title string - ViewName string + Length uint64 + Title string + ViewName string } func (msg *IOSScreenLeave) Encode() []byte { @@ -2502,10 +2408,10 @@ func (msg *IOSScreenLeave) TypeID() int { type IOSClickEvent struct { message Timestamp uint64 - Length uint64 - Label string - X uint64 - Y uint64 + Length uint64 + Label string + X uint64 + Y uint64 } func (msg *IOSClickEvent) Encode() []byte { @@ -2530,11 +2436,11 @@ func (msg *IOSClickEvent) TypeID() int { type IOSInputEvent struct { message - Timestamp uint64 - Length uint64 - Value string + Timestamp uint64 + Length uint64 + Value string ValueMasked bool - Label string + Label string } func (msg *IOSInputEvent) Encode() []byte { @@ -2560,9 +2466,9 @@ func (msg *IOSInputEvent) TypeID() int { type IOSPerformanceEvent struct { message Timestamp uint64 - Length uint64 - Name string - Value uint64 + Length uint64 + Name string + Value uint64 } func (msg *IOSPerformanceEvent) Encode() []byte { @@ -2587,9 +2493,9 @@ func (msg *IOSPerformanceEvent) TypeID() int { type IOSLog struct { message Timestamp uint64 - Length uint64 - Severity string - Content string + Length uint64 + Severity string + Content string } func (msg *IOSLog) Encode() []byte { @@ -2614,8 +2520,8 @@ func (msg *IOSLog) TypeID() int { type IOSInternalError struct { message Timestamp uint64 - Length uint64 - Content string + Length uint64 + Content string } func (msg *IOSInternalError) Encode() []byte { @@ -2639,14 +2545,14 @@ func (msg *IOSInternalError) TypeID() int { type IOSNetworkCall struct { message Timestamp uint64 - Length uint64 - Duration uint64 - Headers string - Body string - URL string - Success bool - Method string - Status uint64 + Length uint64 + Duration uint64 + Headers string + Body string + URL string + Success bool + Method string + Status uint64 } func (msg *IOSNetworkCall) Encode() []byte { @@ -2676,19 +2582,19 @@ func (msg *IOSNetworkCall) TypeID() int { type IOSPerformanceAggregated struct { message TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 - MinMemory uint64 - AvgMemory uint64 - MaxMemory uint64 - MinBattery uint64 - AvgBattery uint64 - MaxBattery uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 + MinMemory uint64 + AvgMemory uint64 + MaxMemory uint64 + MinBattery uint64 + AvgBattery uint64 + MaxBattery uint64 } func (msg *IOSPerformanceAggregated) Encode() []byte { @@ -2722,11 +2628,11 @@ func (msg *IOSPerformanceAggregated) TypeID() int { type IOSIssueEvent struct { message - Timestamp uint64 - Type string + Timestamp uint64 + Type string ContextString string - Context string - Payload string + Context string + Payload string } func (msg *IOSIssueEvent) Encode() []byte { @@ -2748,3 +2654,4 @@ func (msg *IOSIssueEvent) Decode() Message { func (msg *IOSIssueEvent) TypeID() int { return 111 } + diff --git a/backend/pkg/messages/raw.go b/backend/pkg/messages/raw.go index 44f666c69..ae8b97365 100644 --- a/backend/pkg/messages/raw.go +++ b/backend/pkg/messages/raw.go @@ -42,3 +42,17 @@ func (m *RawMessage) SessionID() uint64 { } return 0 } + +func (m *RawMessage) MsgID() uint64 { + if m.meta != nil { + return m.meta.Index + } + return 0 +} + +func (m *RawMessage) Time() uint64 { + if m.meta != nil { + return m.meta.Timestamp + } + 
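// Illustrative sketch (not part of the patch): the RawMessage accessors
// added above guard against a nil meta, so a message that was never tied to
// a batch yields zero instead of panicking. The reduced types below are
// stand-ins for the real ones.
package main

import "fmt"

type meta struct{ Index, Timestamp uint64 }

type rawMessage struct{ meta *meta }

func (m *rawMessage) MsgID() uint64 {
	if m.meta != nil {
		return m.meta.Index
	}
	return 0 // no batch context attached yet
}

func main() {
	fmt.Println((&rawMessage{}).MsgID())                // 0, no panic
	fmt.Println((&rawMessage{&meta{Index: 7}}).MsgID()) // 7
}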
return 0 +} diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index ecc00183f..7b0fc37ea 100644 --- a/backend/pkg/messages/read-message.go +++ b/backend/pkg/messages/read-message.go @@ -6,1744 +6,1672 @@ import ( ) func DecodeTimestamp(reader BytesReader) (Message, error) { - var err error = nil - msg := &Timestamp{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Timestamp{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err } func DecodeSessionStart(reader BytesReader) (Message, error) { - var err error = nil - msg := &SessionStart{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SessionStart{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ProjectID, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.TrackerVersion, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.RevID, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserUUID, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserAgent, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserOS, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserOSVersion, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserBrowser, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserBrowserVersion, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDevice, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDeviceType, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDeviceMemorySize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDeviceHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.UserCountry, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserID, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSessionEndDeprecated(reader BytesReader) (Message, error) { - var err error = nil - msg := &SessionEndDeprecated{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SessionEndDeprecated{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err } func DecodeSetPageLocation(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetPageLocation{} - if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &SetPageLocation{} + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Referrer, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.NavigationStart, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetViewportSize(reader BytesReader) (Message, error) { - var err error = nil - msg := 
&SetViewportSize{} - if msg.Width, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetViewportSize{} + if msg.Width, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Height, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetViewportScroll(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetViewportScroll{} - if msg.X, err = reader.ReadInt(); err != nil { - return nil, err - } + var err error = nil + msg := &SetViewportScroll{} + if msg.X, err = reader.ReadInt(); err != nil { + return nil, err + } if msg.Y, err = reader.ReadInt(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCreateDocument(reader BytesReader) (Message, error) { - var err error = nil - msg := &CreateDocument{} - - return msg, err + var err error = nil + msg := &CreateDocument{} + + return msg, err } func DecodeCreateElementNode(reader BytesReader) (Message, error) { - var err error = nil - msg := &CreateElementNode{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CreateElementNode{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ParentID, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.index, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Tag, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.SVG, err = reader.ReadBoolean(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCreateTextNode(reader BytesReader) (Message, error) { - var err error = nil - msg := &CreateTextNode{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CreateTextNode{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ParentID, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeMoveNode(reader BytesReader) (Message, error) { - var err error = nil - msg := &MoveNode{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &MoveNode{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ParentID, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeRemoveNode(reader BytesReader) (Message, error) { - var err error = nil - msg := &RemoveNode{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &RemoveNode{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err } func DecodeSetNodeAttribute(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeAttribute{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetNodeAttribute{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, 
err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeRemoveNodeAttribute(reader BytesReader) (Message, error) { - var err error = nil - msg := &RemoveNodeAttribute{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &RemoveNodeAttribute{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetNodeData(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeData{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetNodeData{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Data, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetCSSData(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetCSSData{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetCSSData{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Data, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetNodeScroll(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeScroll{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetNodeScroll{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.X, err = reader.ReadInt(); err != nil { - return nil, err - } + return nil, err + } if msg.Y, err = reader.ReadInt(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetInputTarget(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetInputTarget{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetInputTarget{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetInputValue(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetInputValue{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetInputValue{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Mask, err = reader.ReadInt(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetInputChecked(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetInputChecked{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetInputChecked{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Checked, err = reader.ReadBoolean(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeMouseMove(reader BytesReader) (Message, error) { - var err error = nil - msg := &MouseMove{} - if msg.X, err = 
reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &MouseMove{} + if msg.X, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Y, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeNetworkRequest(reader BytesReader) (Message, error) { - var err error = nil - msg := &NetworkRequest{} - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &NetworkRequest{} + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Method, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Request, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Response, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Status, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeConsoleLog(reader BytesReader) (Message, error) { - var err error = nil - msg := &ConsoleLog{} - if msg.Level, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &ConsoleLog{} + if msg.Level, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodePageLoadTiming(reader BytesReader) (Message, error) { - var err error = nil - msg := &PageLoadTiming{} - if msg.RequestStart, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &PageLoadTiming{} + if msg.RequestStart, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ResponseStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ResponseEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.DomContentLoadedEventStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.DomContentLoadedEventEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.LoadEventStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.LoadEventEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstPaint, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstContentfulPaint, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodePageRenderTiming(reader BytesReader) (Message, error) { - var err error = nil - msg := &PageRenderTiming{} - if msg.SpeedIndex, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &PageRenderTiming{} + if msg.SpeedIndex, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.VisuallyComplete, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.TimeToInteractive, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, 
err + return nil, err + } + return msg, err } func DecodeJSExceptionDeprecated(reader BytesReader) (Message, error) { - var err error = nil - msg := &JSExceptionDeprecated{} - if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &JSExceptionDeprecated{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Message, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIntegrationEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IntegrationEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IntegrationEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Source, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Message, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCustomEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &CustomEvent{} - if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &CustomEvent{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeUserID(reader BytesReader) (Message, error) { - var err error = nil - msg := &UserID{} - if msg.ID, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &UserID{} + if msg.ID, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err } func DecodeUserAnonymousID(reader BytesReader) (Message, error) { - var err error = nil - msg := &UserAnonymousID{} - if msg.ID, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &UserAnonymousID{} + if msg.ID, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err } func DecodeMetadata(reader BytesReader) (Message, error) { - var err error = nil - msg := &Metadata{} - if msg.Key, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Metadata{} + if msg.Key, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodePageEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &PageEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &PageEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Referrer, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if 
msg.Loaded, err = reader.ReadBoolean(); err != nil { - return nil, err - } + return nil, err + } if msg.RequestStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ResponseStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ResponseEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.DomContentLoadedEventStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.DomContentLoadedEventEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.LoadEventStart, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.LoadEventEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstPaint, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstContentfulPaint, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.SpeedIndex, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.VisuallyComplete, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.TimeToInteractive, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeInputEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &InputEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &InputEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ValueMasked, err = reader.ReadBoolean(); err != nil { - return nil, err - } + return nil, err + } if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err -} - -func DecodeClickEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &ClickEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.HesitationTime, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Selector, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err -} - -func DecodeResourceEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &ResourceEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.TTFB, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.HeaderSize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Success, err = reader.ReadBoolean(); err != nil { - 
return nil, err - } - if msg.Method, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Status, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCSSInsertRule(reader BytesReader) (Message, error) { - var err error = nil - msg := &CSSInsertRule{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CSSInsertRule{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Rule, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCSSDeleteRule(reader BytesReader) (Message, error) { - var err error = nil - msg := &CSSDeleteRule{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CSSDeleteRule{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeFetch(reader BytesReader) (Message, error) { - var err error = nil - msg := &Fetch{} - if msg.Method, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Fetch{} + if msg.Method, err = reader.ReadString(); err != nil { + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Request, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Response, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Status, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeProfiler(reader BytesReader) (Message, error) { - var err error = nil - msg := &Profiler{} - if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Profiler{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Args, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Result, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeOTable(reader BytesReader) (Message, error) { - var err error = nil - msg := &OTable{} - if msg.Key, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &OTable{} + if msg.Key, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeStateAction(reader BytesReader) (Message, error) { - var err error = nil - msg := &StateAction{} - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &StateAction{} + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err } func 
DecodeRedux(reader BytesReader) (Message, error) { - var err error = nil - msg := &Redux{} - if msg.Action, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Redux{} + if msg.Action, err = reader.ReadString(); err != nil { + return nil, err + } if msg.State, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeVuex(reader BytesReader) (Message, error) { - var err error = nil - msg := &Vuex{} - if msg.Mutation, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Vuex{} + if msg.Mutation, err = reader.ReadString(); err != nil { + return nil, err + } if msg.State, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeMobX(reader BytesReader) (Message, error) { - var err error = nil - msg := &MobX{} - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &MobX{} + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeNgRx(reader BytesReader) (Message, error) { - var err error = nil - msg := &NgRx{} - if msg.Action, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &NgRx{} + if msg.Action, err = reader.ReadString(); err != nil { + return nil, err + } if msg.State, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeGraphQL(reader BytesReader) (Message, error) { - var err error = nil - msg := &GraphQL{} - if msg.OperationKind, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &GraphQL{} + if msg.OperationKind, err = reader.ReadString(); err != nil { + return nil, err + } if msg.OperationName, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Variables, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Response, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodePerformanceTrack(reader BytesReader) (Message, error) { - var err error = nil - msg := &PerformanceTrack{} - if msg.Frames, err = reader.ReadInt(); err != nil { - return nil, err - } + var err error = nil + msg := &PerformanceTrack{} + if msg.Frames, err = reader.ReadInt(); err != nil { + return nil, err + } if msg.Ticks, err = reader.ReadInt(); err != nil { - return nil, err - } + return nil, err + } if msg.TotalJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.UsedJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeStringDict(reader BytesReader) (Message, error) { - var err error = nil - msg := &StringDict{} - if msg.Key, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &StringDict{} + if msg.Key, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Value, err = 
reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetNodeAttributeDict(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeAttributeDict{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetNodeAttributeDict{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.NameKey, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ValueKey, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err -} - -func DecodeDOMDrop(reader BytesReader) (Message, error) { - var err error = nil - msg := &DOMDrop{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeResourceTiming(reader BytesReader) (Message, error) { - var err error = nil - msg := &ResourceTiming{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &ResourceTiming{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.TTFB, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.HeaderSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Initiator, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeConnectionInformation(reader BytesReader) (Message, error) { - var err error = nil - msg := &ConnectionInformation{} - if msg.Downlink, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &ConnectionInformation{} + if msg.Downlink, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetPageVisibility(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetPageVisibility{} - if msg.hidden, err = reader.ReadBoolean(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetPageVisibility{} + if msg.hidden, err = reader.ReadBoolean(); err != nil { + return nil, err + } + return msg, err } func DecodePerformanceTrackAggr(reader BytesReader) (Message, error) { - var err error = nil - msg := &PerformanceTrackAggr{} - if msg.TimestampStart, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &PerformanceTrackAggr{} + if msg.TimestampStart, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.TimestampEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinCPU, 
err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgCPU, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxCPU, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinTotalJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgTotalJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxTotalJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinUsedJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgUsedJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxUsedJSHeapSize, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeLoadFontFace(reader BytesReader) (Message, error) { - var err error = nil - msg := &LoadFontFace{} - if msg.ParentID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &LoadFontFace{} + if msg.ParentID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Family, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Source, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Descriptors, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetNodeFocus(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeFocus{} - if msg.ID, err = reader.ReadInt(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetNodeFocus{} + if msg.ID, err = reader.ReadInt(); err != nil { + return nil, err + } + return msg, err } func DecodeLongTask(reader BytesReader) (Message, error) { - var err error = nil - msg := &LongTask{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &LongTask{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Context, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ContainerType, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.ContainerSrc, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ContainerId, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ContainerName, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSetNodeAttributeURLBased(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetNodeAttributeURLBased{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetNodeAttributeURLBased{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.BaseURL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + 
return nil, err + } + return msg, err } func DecodeSetCSSDataURLBased(reader BytesReader) (Message, error) { - var err error = nil - msg := &SetCSSDataURLBased{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SetCSSDataURLBased{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Data, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.BaseURL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIssueEventDeprecated(reader BytesReader) (Message, error) { - var err error = nil - msg := &IssueEventDeprecated{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IssueEventDeprecated{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ContextString, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Context, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeTechnicalInfo(reader BytesReader) (Message, error) { - var err error = nil - msg := &TechnicalInfo{} - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &TechnicalInfo{} + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCustomIssue(reader BytesReader) (Message, error) { - var err error = nil - msg := &CustomIssue{} - if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &CustomIssue{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAssetCache(reader BytesReader) (Message, error) { - var err error = nil - msg := &AssetCache{} - if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AssetCache{} + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err } func DecodeCSSInsertRuleURLBased(reader BytesReader) (Message, error) { - var err error = nil - msg := &CSSInsertRuleURLBased{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CSSInsertRuleURLBased{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Rule, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.BaseURL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeMouseClick(reader BytesReader) (Message, error) { - var err error = nil - msg := &MouseClick{} - if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } 
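
The bulk of this read-message.go hunk only reindents the generated decoders (spaces to tabs); the substantive change is the removal of DecodeClickEvent, DecodeResourceEvent and DecodeDOMDrop together with their cases (33, 35, 52) in the ReadMessage switch at the end of the file. Every decoder follows the same shape: allocate the message struct, read each field in wire order, and return nil on the first failed read so callers never see a partially filled message. Below is a sketch of how the per-field error boilerplate could be collapsed with a small stateful reader; this is an illustrative alternative, not what the patch does. fieldReader and DecodeMouseClickCompact are hypothetical names, while BytesReader, MouseClick and Message are assumed from the surrounding pkg/messages package.

// fieldReader chains reads and latches the first error, so a decoder
// can assign fields without an if/err block per field.
type fieldReader struct {
	r   BytesReader
	err error
}

// uint reads the next unsigned integer unless a previous read failed.
func (f *fieldReader) uint(dst *uint64) {
	if f.err == nil {
		*dst, f.err = f.r.ReadUint()
	}
}

// str reads the next string unless a previous read failed.
func (f *fieldReader) str(dst *string) {
	if f.err == nil {
		*dst, f.err = f.r.ReadString()
	}
}

// DecodeMouseClickCompact reads the same fields, in the same wire
// order, as the DecodeMouseClick shown in this hunk.
func DecodeMouseClickCompact(reader BytesReader) (Message, error) {
	msg := &MouseClick{}
	f := fieldReader{r: reader}
	f.uint(&msg.ID)
	f.uint(&msg.HesitationTime)
	f.str(&msg.Label)
	f.str(&msg.Selector)
	if f.err != nil {
		return nil, f.err
	}
	return msg, nil
}

The first failed read latches in f.err and turns the remaining reads into no-ops, so the caller still observes exactly one error, matching the early-return behavior of the generated decoders.
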
+ var err error = nil + msg := &MouseClick{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.HesitationTime, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Selector, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeCreateIFrameDocument(reader BytesReader) (Message, error) { - var err error = nil - msg := &CreateIFrameDocument{} - if msg.FrameID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &CreateIFrameDocument{} + if msg.FrameID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSReplaceURLBased(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSReplaceURLBased{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSReplaceURLBased{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Text, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.BaseURL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSReplace(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSReplace{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSReplace{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Text, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSInsertRuleURLBased(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSInsertRuleURLBased{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSInsertRuleURLBased{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Rule, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.BaseURL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSInsertRule(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSInsertRule{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSInsertRule{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Rule, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSDeleteRule(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSDeleteRule{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSDeleteRule{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, 
err + } if msg.Index, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSAddOwner(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSAddOwner{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSAddOwner{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeAdoptedSSRemoveOwner(reader BytesReader) (Message, error) { - var err error = nil - msg := &AdoptedSSRemoveOwner{} - if msg.SheetID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &AdoptedSSRemoveOwner{} + if msg.SheetID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ID, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeJSException(reader BytesReader) (Message, error) { - var err error = nil - msg := &JSException{} - if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &JSException{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } if msg.Message, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Metadata, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeZustand(reader BytesReader) (Message, error) { - var err error = nil - msg := &Zustand{} - if msg.Mutation, err = reader.ReadString(); err != nil { - return nil, err - } + var err error = nil + msg := &Zustand{} + if msg.Mutation, err = reader.ReadString(); err != nil { + return nil, err + } if msg.State, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeBatchMeta(reader BytesReader) (Message, error) { - var err error = nil - msg := &BatchMeta{} - if msg.PageNo, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &BatchMeta{} + if msg.PageNo, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.FirstIndex, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Timestamp, err = reader.ReadInt(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeBatchMetadata(reader BytesReader) (Message, error) { - var err error = nil - msg := &BatchMetadata{} - if msg.Version, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &BatchMetadata{} + if msg.Version, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.PageNo, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstIndex, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Timestamp, err = reader.ReadInt(); err != nil { - return nil, err - } + return nil, err + } if msg.Location, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodePartitionedMessage(reader BytesReader) (Message, error) { - var err error = nil - msg := 
&PartitionedMessage{} - if msg.PartNo, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &PartitionedMessage{} + if msg.PartNo, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.PartTotal, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIssueEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IssueEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IssueEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ContextString, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Context, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSessionEnd(reader BytesReader) (Message, error) { - var err error = nil - msg := &SessionEnd{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SessionEnd{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.EncryptionKey, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeSessionSearch(reader BytesReader) (Message, error) { - var err error = nil - msg := &SessionSearch{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &SessionSearch{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Partition, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSBatchMeta(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSBatchMeta{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSBatchMeta{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.FirstIndex, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSSessionStart(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSSessionStart{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSSessionStart{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.ProjectID, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.TrackerVersion, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.RevID, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserUUID, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserOS, err = reader.ReadString(); 
err != nil { - return nil, err - } + return nil, err + } if msg.UserOSVersion, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDevice, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserDeviceType, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.UserCountry, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSSessionEnd(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSSessionEnd{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSSessionEnd{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err } func DecodeIOSMetadata(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSMetadata{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSMetadata{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Key, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSCustomEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSCustomEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSCustomEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSUserID(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSUserID{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSUserID{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSUserAnonymousID(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSUserAnonymousID{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSUserAnonymousID{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSScreenChanges(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSScreenChanges{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSScreenChanges{} + if msg.Timestamp, err = 
reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.X, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Y, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Width, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Height, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSCrash(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSCrash{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSCrash{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Reason, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Stacktrace, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSScreenEnter(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSScreenEnter{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSScreenEnter{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Title, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ViewName, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSScreenLeave(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSScreenLeave{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSScreenLeave{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Title, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ViewName, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSClickEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSClickEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSClickEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.X, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Y, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSInputEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSInputEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var 
err error = nil + msg := &IOSInputEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ValueMasked, err = reader.ReadBoolean(); err != nil { - return nil, err - } + return nil, err + } if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSPerformanceEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSPerformanceEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSPerformanceEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Name, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Value, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSLog(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSLog{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSLog{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Severity, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Content, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSInternalError(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSInternalError{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSInternalError{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Content, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSNetworkCall(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSNetworkCall{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSNetworkCall{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Length, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.Headers, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Body, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Success, err = reader.ReadBoolean(); err != nil { - return nil, err - } + return nil, err + } if msg.Method, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Status, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + 
return msg, err } func DecodeIOSPerformanceAggregated(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSPerformanceAggregated{} - if msg.TimestampStart, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSPerformanceAggregated{} + if msg.TimestampStart, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.TimestampEnd, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxFPS, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinCPU, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgCPU, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxCPU, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinMemory, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgMemory, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxMemory, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MinBattery, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.AvgBattery, err = reader.ReadUint(); err != nil { - return nil, err - } + return nil, err + } if msg.MaxBattery, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func DecodeIOSIssueEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &IOSIssueEvent{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } + var err error = nil + msg := &IOSIssueEvent{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.ContextString, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Context, err = reader.ReadString(); err != nil { - return nil, err - } + return nil, err + } if msg.Payload, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err + return nil, err + } + return msg, err } func ReadMessage(t uint64, reader BytesReader) (Message, error) { @@ -1812,10 +1740,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) { return DecodePageEvent(reader) case 32: return DecodeInputEvent(reader) - case 33: - return DecodeClickEvent(reader) - case 35: - return DecodeResourceEvent(reader) case 37: return DecodeCSSInsertRule(reader) case 38: @@ -1844,8 +1768,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) { return DecodeStringDict(reader) case 51: return DecodeSetNodeAttributeDict(reader) - case 52: - return DecodeDOMDrop(reader) case 53: return DecodeResourceTiming(reader) case 54: diff --git a/backend/pkg/sessions/builder.go b/backend/pkg/sessions/builder.go index d21fd890a..683e4ccb5 100644 --- a/backend/pkg/sessions/builder.go +++ b/backend/pkg/sessions/builder.go @@ -10,7 +10,7 @@ import ( type builder struct { sessionID uint64 - readyMsgs []Message + readyMsgs chan Message timestamp uint64 lastMessageID uint64 lastSystemTime time.Time @@ -18,20 +18,14 @@ type builder struct { ended bool } -func NewBuilder(sessionID 
uint64, handlers ...handlers.MessageProcessor) *builder { +func NewBuilder(sessionID uint64, events chan Message, handlers ...handlers.MessageProcessor) *builder { return &builder{ sessionID: sessionID, processors: handlers, + readyMsgs: events, } } -func (b *builder) iterateReadyMessages(iter func(msg Message)) { - for _, readyMsg := range b.readyMsgs { - iter(readyMsg) - } - b.readyMsgs = nil -} - func (b *builder) checkSessionEnd(message Message) { if _, isEnd := message.(*IOSSessionEnd); isEnd { b.ended = true @@ -41,34 +35,31 @@ func (b *builder) checkSessionEnd(message Message) { } } -func (b *builder) handleMessage(message Message, messageID uint64) { - if messageID < b.lastMessageID { +func (b *builder) handleMessage(m Message) { + if m.MsgID() < b.lastMessageID { // May happen in case of duplicated messages in kafka (if `idempotence: false`) - log.Printf("skip message with wrong msgID, sessID: %d, msgID: %d, lastID: %d", b.sessionID, messageID, b.lastMessageID) + log.Printf("skip message with wrong msgID, sessID: %d, msgID: %d, lastID: %d", b.sessionID, m.MsgID(), b.lastMessageID) return } - timestamp := GetTimestamp(message) - if timestamp == 0 { - switch message.(type) { + if m.Time() <= 0 { + switch m.(type) { case *IssueEvent, *PerformanceTrackAggr: break default: - log.Printf("skip message with empty timestamp, sessID: %d, msgID: %d, msgType: %d", b.sessionID, messageID, message.TypeID()) + log.Printf("skip message with incorrect timestamp, sessID: %d, msgID: %d, msgType: %d", b.sessionID, m.MsgID(), m.TypeID()) } return } - if timestamp < b.timestamp { - //log.Printf("skip message with wrong timestamp, sessID: %d, msgID: %d, type: %d, msgTS: %d, lastTS: %d", b.sessionID, messageID, message.TypeID(), timestamp, b.timestamp) - } else { - b.timestamp = timestamp + if m.Time() > b.timestamp { + b.timestamp = m.Time() } - b.lastSystemTime = time.Now() + // Process current message for _, p := range b.processors { - if rm := p.Handle(message, messageID, b.timestamp); rm != nil { - rm.Meta().SetMeta(message.Meta()) - b.readyMsgs = append(b.readyMsgs, rm) + if rm := p.Handle(m, b.timestamp); rm != nil { + rm.Meta().SetMeta(m.Meta()) + b.readyMsgs <- rm } } - b.checkSessionEnd(message) + b.checkSessionEnd(m) } diff --git a/backend/pkg/sessions/builderMap.go b/backend/pkg/sessions/builderMap.go index 85e787929..9e66ce260 100644 --- a/backend/pkg/sessions/builderMap.go +++ b/backend/pkg/sessions/builderMap.go @@ -2,92 +2,98 @@ package sessions import ( "log" - "openreplay/backend/pkg/handlers" + "sync" "time" + "openreplay/backend/pkg/handlers" . "openreplay/backend/pkg/messages" ) -const FORCE_DELETE_TIMEOUT = 4 * time.Hour +const ForceDeleteTimeout = 30 * time.Minute type builderMap struct { handlersFabric func() []handlers.MessageProcessor sessions map[uint64]*builder + mutex *sync.Mutex + events chan Message + done chan struct{} } -func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) *builderMap { - return &builderMap{ +type EventBuilder interface { + Events() chan Message + HandleMessage(msg Message) + Stop() +} + +func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) EventBuilder { + b := &builderMap{ handlersFabric: handlersFabric, sessions: make(map[uint64]*builder), + mutex: &sync.Mutex{}, + events: make(chan Message, 1024*10), + done: make(chan struct{}), } -} - -func (m *builderMap) GetBuilder(sessionID uint64) *builder { - b := m.sessions[sessionID] - if b == nil { - b = NewBuilder(sessionID, m.handlersFabric()...) 
diff --git a/backend/pkg/sessions/builderMap.go b/backend/pkg/sessions/builderMap.go
index 85e787929..9e66ce260 100644
--- a/backend/pkg/sessions/builderMap.go
+++ b/backend/pkg/sessions/builderMap.go
@@ -2,92 +2,98 @@ package sessions
 
 import (
 	"log"
-	"openreplay/backend/pkg/handlers"
+	"sync"
 	"time"
 
+	"openreplay/backend/pkg/handlers"
 	. "openreplay/backend/pkg/messages"
 )
 
-const FORCE_DELETE_TIMEOUT = 4 * time.Hour
+const ForceDeleteTimeout = 30 * time.Minute
 
 type builderMap struct {
 	handlersFabric func() []handlers.MessageProcessor
 	sessions       map[uint64]*builder
+	mutex          *sync.Mutex
+	events         chan Message
+	done           chan struct{}
 }
 
-func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) *builderMap {
-	return &builderMap{
+type EventBuilder interface {
+	Events() chan Message
+	HandleMessage(msg Message)
+	Stop()
+}
+
+func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) EventBuilder {
+	b := &builderMap{
 		handlersFabric: handlersFabric,
 		sessions:       make(map[uint64]*builder),
+		mutex:          &sync.Mutex{},
+		events:         make(chan Message, 1024*10),
+		done:           make(chan struct{}),
 	}
-}
-
-func (m *builderMap) GetBuilder(sessionID uint64) *builder {
-	b := m.sessions[sessionID]
-	if b == nil {
-		b = NewBuilder(sessionID, m.handlersFabric()...) // Should create new instances
-		m.sessions[sessionID] = b
-	}
+	go b.worker()
 	return b
 }
 
-func (m *builderMap) HandleMessage(msg Message) {
-	sessionID := msg.SessionID()
-	messageID := msg.Meta().Index
-	b := m.GetBuilder(sessionID)
-	b.handleMessage(msg, messageID)
+func (m *builderMap) getBuilder(sessionID uint64) *builder {
+	m.mutex.Lock()
+	b := m.sessions[sessionID]
+	if b == nil {
+		b = NewBuilder(sessionID, m.events, m.handlersFabric()...)
+		m.sessions[sessionID] = b
+	}
+	m.mutex.Unlock()
+	return b
 }
 
-func (m *builderMap) ClearOldSessions() {
+func (m *builderMap) Events() chan Message {
+	return m.events
+}
+
+func (m *builderMap) HandleMessage(msg Message) {
+	m.getBuilder(msg.SessionID()).handleMessage(msg)
+}
+
+func (m *builderMap) worker() {
+	tick := time.Tick(10 * time.Second)
+	for {
+		select {
+		case <-tick:
+			m.checkSessions()
+		case <-m.done:
+			return
+		}
+	}
+}
+
+func (m *builderMap) checkSessions() {
+	m.mutex.Lock()
 	deleted := 0
 	now := time.Now()
-	for id, sess := range m.sessions {
-		if sess.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(now) {
-			// Should delete zombie session
-			delete(m.sessions, id)
+	for sessID, b := range m.sessions {
+		// Check session's events
+		if b.ended || b.lastSystemTime.Add(ForceDeleteTimeout).Before(now) {
+			// Build rest of messages
+			for _, p := range b.processors {
+				if rm := p.Build(); rm != nil {
+					rm.Meta().SetSessionID(sessID)
+					m.events <- rm
+				}
+			}
+			delete(m.sessions, sessID)
 			deleted++
 		}
 	}
+	m.mutex.Unlock()
 	if deleted > 0 {
 		log.Printf("deleted %d sessions from message builder", deleted)
 	}
 }
 
-func (m *builderMap) iterateSessionReadyMessages(sessionID uint64, b *builder, iter func(msg Message)) {
-	if b.ended || b.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(time.Now()) {
-		for _, p := range b.processors {
-			if rm := p.Build(); rm != nil {
-				rm.Meta().SetSessionID(sessionID)
-				b.readyMsgs = append(b.readyMsgs, rm)
-			}
-		}
-	}
-	b.iterateReadyMessages(iter)
-	if b.ended {
-		delete(m.sessions, sessionID)
-	}
-}
-
-func (m *builderMap) IterateReadyMessages(iter func(sessionID uint64, msg Message)) {
-	for sessionID, session := range m.sessions {
-		m.iterateSessionReadyMessages(
-			sessionID,
-			session,
-			func(msg Message) {
-				iter(sessionID, msg)
-			},
-		)
-	}
-}
-
-func (m *builderMap) IterateSessionReadyMessages(sessionID uint64, iter func(msg Message)) {
-	session, ok := m.sessions[sessionID]
-	if !ok {
-		return
-	}
-	m.iterateSessionReadyMessages(
-		sessionID,
-		session,
-		iter,
-	)
+func (m *builderMap) Stop() {
+	m.done <- struct{}{}
+	m.checkSessions()
+	close(m.events)
 }
diff --git a/backend/pkg/terminator/terminator.go b/backend/pkg/terminator/terminator.go
new file mode 100644
index 000000000..29e106aa1
--- /dev/null
+++ b/backend/pkg/terminator/terminator.go
@@ -0,0 +1,22 @@
+package terminator
+
+import (
+	"log"
+	"os"
+	"os/signal"
+	"syscall"
+)
+
+// ServiceStopper is a common interface for all services
+type ServiceStopper interface {
+	Stop()
+}
+
+func Wait(s ServiceStopper) {
+	sigChan := make(chan os.Signal, 1)
+	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
+	sig := <-sigChan
+	log.Printf("Caught signal %v: terminating\n", sig)
+	s.Stop()
+	os.Exit(0)
+}
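
The new terminator package gives every service the same shutdown path: block on SIGINT/SIGTERM, then call Stop(). A sketch of how a service main might use it; the service type is hypothetical, only the signal handling mirrors Wait above:

package main

import (
	"log"
	"os"
	"os/signal"
	"syscall"
)

// service is a hypothetical worker that satisfies ServiceStopper.
type service struct{ done chan struct{} }

func (s *service) Stop() {
	close(s.done)
	log.Println("flushed and stopped")
}

// wait mirrors terminator.Wait: block until a signal, then stop cleanly.
func wait(s interface{ Stop() }) {
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
	sig := <-sigChan
	log.Printf("caught signal %v: terminating", sig)
	s.Stop()
	os.Exit(0)
}

func main() {
	svc := &service{done: make(chan struct{})}
	// ... start consumers and producers here ...
	wait(svc) // blocks until Ctrl-C or SIGTERM
}
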
diff --git a/backend/pkg/url/url.go b/backend/pkg/url/url.go
index 654e803eb..30b3f2a7e 100644
--- a/backend/pkg/url/url.go
+++ b/backend/pkg/url/url.go
@@ -1,7 +1,7 @@
 package url
 
 import (
-	_url "net/url"
+	"net/url"
 	"strings"
 )
 
@@ -11,7 +11,7 @@ func DiscardURLQuery(url string) string {
 
 func GetURLParts(rawURL string) (string, string, string, error) {
 	rawURL = strings.Replace(rawURL, "\t", "", -1) // Other chars?
-	u, err := _url.Parse(rawURL)
+	u, err := url.Parse(rawURL)
 	if err != nil {
 		return "", "", "", err
 	}
@@ -22,3 +22,34 @@ func GetURLParts(rawURL string) (string, string, string, error) {
 	}
 	return u.Host, path, u.RawQuery, nil
 }
+
+func getURLExtension(URL string) string {
+	u, err := url.Parse(URL)
+	if err != nil {
+		return ""
+	}
+	i := strings.LastIndex(u.Path, ".")
+	return u.Path[i+1:]
+}
+
+func GetResourceType(initiator string, URL string) string {
+	switch initiator {
+	case "xmlhttprequest", "fetch":
+		return "fetch"
+	case "img":
+		return "img"
+	default:
+		switch getURLExtension(URL) {
+		case "css":
+			return "stylesheet"
+		case "js":
+			return "script"
+		case "png", "gif", "jpg", "jpeg", "svg":
+			return "img"
+		case "mp4", "mkv", "ogg", "webm", "avi", "mp3":
+			return "media"
+		default:
+			return "other"
+		}
+	}
+}
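
GetResourceType trusts the request initiator first and falls back to the URL extension only for everything else. One subtlety: for a path with no dot, strings.LastIndex returns -1, so u.Path[i+1:] yields the whole path, which then lands in the default "other" branch. A runnable demo, with the two helpers copied from the diff so the example is self-contained:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// getURLExtension and getResourceType are copied from the diff above.
func getURLExtension(URL string) string {
	u, err := url.Parse(URL)
	if err != nil {
		return ""
	}
	i := strings.LastIndex(u.Path, ".")
	return u.Path[i+1:]
}

func getResourceType(initiator string, URL string) string {
	switch initiator {
	case "xmlhttprequest", "fetch":
		return "fetch"
	case "img":
		return "img"
	default:
		switch getURLExtension(URL) {
		case "css":
			return "stylesheet"
		case "js":
			return "script"
		case "png", "gif", "jpg", "jpeg", "svg":
			return "img"
		case "mp4", "mkv", "ogg", "webm", "avi", "mp3":
			return "media"
		default:
			return "other"
		}
	}
}

func main() {
	fmt.Println(getResourceType("fetch", "https://x.dev/api/users"))      // fetch (initiator wins)
	fmt.Println(getResourceType("link", "https://x.dev/app.css?v=2"))     // stylesheet (query ignored)
	fmt.Println(getResourceType("script", "https://x.dev/bundle.min.js")) // script (last dot wins)
	fmt.Println(getResourceType("other", "https://x.dev/healthz"))        // other (no extension)
}
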
diff --git a/ee/backend/internal/db/datasaver/fts.go b/ee/backend/internal/db/datasaver/fts.go
index 1ff546d27..3c049acae 100644
--- a/ee/backend/internal/db/datasaver/fts.go
+++ b/ee/backend/internal/db/datasaver/fts.go
@@ -18,6 +18,20 @@ type NetworkRequestFTS struct {
 	Duration  uint64 `json:"duration"`
 }
 
+func WrapNetworkRequest(m *messages.NetworkRequest, projID uint32) *NetworkRequestFTS {
+	return &NetworkRequestFTS{
+		SessionID: m.SessionID(),
+		ProjectID: projID,
+		Method:    m.Method,
+		URL:       m.URL,
+		Request:   m.Request,
+		Response:  m.Response,
+		Status:    m.Status,
+		Timestamp: m.Timestamp,
+		Duration:  m.Duration,
+	}
+}
+
 type PageEventFTS struct {
 	SessionID uint64 `json:"session_id"`
 	ProjectID uint32 `json:"project_id"`
@@ -40,6 +54,30 @@ type PageEventFTS struct {
 	TimeToInteractive uint64 `json:"time_to_interactive"`
 }
 
+func WrapPageEvent(m *messages.PageEvent, projID uint32) *PageEventFTS {
+	return &PageEventFTS{
+		SessionID:                  m.SessionID(),
+		ProjectID:                  projID,
+		MessageID:                  m.MessageID,
+		Timestamp:                  m.Timestamp,
+		URL:                        m.URL,
+		Referrer:                   m.Referrer,
+		Loaded:                     m.Loaded,
+		RequestStart:               m.RequestStart,
+		ResponseStart:              m.ResponseStart,
+		ResponseEnd:                m.ResponseEnd,
+		DomContentLoadedEventStart: m.DomContentLoadedEventStart,
+		DomContentLoadedEventEnd:   m.DomContentLoadedEventEnd,
+		LoadEventStart:             m.LoadEventStart,
+		LoadEventEnd:               m.LoadEventEnd,
+		FirstPaint:                 m.FirstPaint,
+		FirstContentfulPaint:       m.FirstContentfulPaint,
+		SpeedIndex:                 m.SpeedIndex,
+		VisuallyComplete:           m.VisuallyComplete,
+		TimeToInteractive:          m.TimeToInteractive,
+	}
+}
+
 type GraphQLFTS struct {
 	SessionID uint64 `json:"session_id"`
 	ProjectID uint32 `json:"project_id"`
@@ -49,68 +87,46 @@ type GraphQLFTS struct {
 	Response      string `json:"response"`
 }
 
-func (s *Saver) SendToFTS(msg messages.Message, projID uint32) {
+func WrapGraphQL(m *messages.GraphQL, projID uint32) *GraphQLFTS {
+	return &GraphQLFTS{
+		SessionID:     m.SessionID(),
+		ProjectID:     projID,
+		OperationKind: m.OperationKind,
+		OperationName: m.OperationName,
+		Variables:     m.Variables,
+		Response:      m.Response,
+	}
+}
+
+func (s *saverImpl) sendToFTS(msg messages.Message) {
 	// Skip, if FTS is disabled
 	if s.producer == nil {
 		return
 	}
 	var (
-		event []byte
-		err   error
+		projID uint32
+		event  []byte
+		err    error
 	)
+	if sess, err := s.pg.Cache.GetSession(msg.SessionID()); err == nil {
+		projID = sess.ProjectID
+	}
+
 	switch m := msg.(type) {
 	// Common
 	case *messages.NetworkRequest:
-		event, err = json.Marshal(NetworkRequestFTS{
-			SessionID: msg.SessionID(),
-			ProjectID: projID,
-			Method:    m.Method,
-			URL:       m.URL,
-			Request:   m.Request,
-			Response:  m.Response,
-			Status:    m.Status,
-			Timestamp: m.Timestamp,
-			Duration:  m.Duration,
-		})
+		event, err = json.Marshal(WrapNetworkRequest(m, projID))
 	case *messages.PageEvent:
-		event, err = json.Marshal(PageEventFTS{
-			SessionID:                  msg.SessionID(),
-			ProjectID:                  projID,
-			MessageID:                  m.MessageID,
-			Timestamp:                  m.Timestamp,
-			URL:                        m.URL,
-			Referrer:                   m.Referrer,
-			Loaded:                     m.Loaded,
-			RequestStart:               m.RequestStart,
-			ResponseStart:              m.ResponseStart,
-			ResponseEnd:                m.ResponseEnd,
-			DomContentLoadedEventStart: m.DomContentLoadedEventStart,
-			DomContentLoadedEventEnd:   m.DomContentLoadedEventEnd,
-			LoadEventStart:             m.LoadEventStart,
-			LoadEventEnd:               m.LoadEventEnd,
-			FirstPaint:                 m.FirstPaint,
-			FirstContentfulPaint:       m.FirstContentfulPaint,
-			SpeedIndex:                 m.SpeedIndex,
-			VisuallyComplete:           m.VisuallyComplete,
-			TimeToInteractive:          m.TimeToInteractive,
-		})
+		event, err = json.Marshal(WrapPageEvent(m, projID))
 	case *messages.GraphQL:
-		event, err = json.Marshal(GraphQLFTS{
-			SessionID:     msg.SessionID(),
-			ProjectID:     projID,
-			OperationKind: m.OperationKind,
-			OperationName: m.OperationName,
-			Variables:     m.Variables,
-			Response:      m.Response,
-		})
+		event, err = json.Marshal(WrapGraphQL(m, projID))
 	}
 	if err != nil {
 		log.Printf("can't marshal json for quickwit: %s", err)
 	} else {
 		if len(event) > 0 {
-			if err := s.producer.Produce(s.topic, msg.SessionID(), event); err != nil {
+			if err := s.producer.Produce(s.cfg.QuickwitTopic, msg.SessionID(), event); err != nil {
 				log.Printf("can't send event to quickwit: %s", err)
 			}
 		}
 	}
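
Each Wrap* helper flattens a tracker message plus the resolved project ID into a JSON document before it is produced to the Quickwit topic. The same flow in isolation, with a stub standing in for the Kafka producer:

package main

import (
	"encoding/json"
	"fmt"
)

// GraphQLFTS mirrors the full-text-search document shape from the diff.
type GraphQLFTS struct {
	SessionID     uint64 `json:"session_id"`
	ProjectID     uint32 `json:"project_id"`
	OperationKind string `json:"operation_kind"`
	OperationName string `json:"operation_name"`
}

// produce stands in for the queue producer: topic + partition key + payload.
func produce(topic string, key uint64, event []byte) error {
	fmt.Printf("topic=%s key=%d payload=%s\n", topic, key, event)
	return nil
}

func main() {
	doc := GraphQLFTS{SessionID: 42, ProjectID: 7, OperationKind: "query", OperationName: "GetUser"}
	event, err := json.Marshal(doc)
	if err != nil {
		fmt.Println("can't marshal json for quickwit:", err)
		return
	}
	if err := produce("quickwit", doc.SessionID, event); err != nil {
		fmt.Println("can't send event to quickwit:", err)
	}
}
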
"openreplay/backend/pkg/messages" -) - -func (mi *Saver) InsertMessage(msg Message) error { - sessionID := msg.SessionID() - switch m := msg.(type) { - // Common - case *Metadata: - if err := mi.pg.InsertMetadata(sessionID, m); err != nil { - return fmt.Errorf("insert metadata err: %s", err) - } - return nil - case *IssueEvent: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertIssue(session, m); err != nil { - log.Printf("can't insert issue event into clickhouse: %s", err) - } - } - return mi.pg.InsertIssueEvent(sessionID, m) - //TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others - - // Web - case *SessionStart: - return mi.pg.HandleWebSessionStart(sessionID, m) - case *SessionEnd: - return mi.pg.HandleWebSessionEnd(sessionID, m) - case *UserID: - return mi.pg.InsertWebUserID(sessionID, m) - case *UserAnonymousID: - return mi.pg.InsertWebUserAnonymousID(sessionID, m) - case *CustomEvent: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertCustom(session, m); err != nil { - log.Printf("can't insert graphQL event into clickhouse: %s", err) - } - } - return mi.pg.InsertWebCustomEvent(sessionID, m) - case *ClickEvent: - return mi.pg.InsertWebClickEvent(sessionID, m) - case *InputEvent: - return mi.pg.InsertWebInputEvent(sessionID, m) - - // Unique Web messages - case *PageEvent: - return mi.pg.InsertWebPageEvent(sessionID, m) - case *JSException: - return mi.pg.InsertWebJSException(m) - case *IntegrationEvent: - return mi.pg.InsertWebIntegrationEvent(m) - case *NetworkRequest: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - project, err := mi.pg.GetProject(session.ProjectID) - if err != nil { - log.Printf("can't get project: %s", err) - } else { - if err := mi.ch.InsertRequest(session, m, project.SaveRequestPayloads); err != nil { - log.Printf("can't insert request event into clickhouse: %s", err) - } - } - } - return mi.pg.InsertWebNetworkRequest(sessionID, m) - case *GraphQL: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertGraphQL(session, m); err != nil { - log.Printf("can't insert graphQL event into clickhouse: %s", err) - } - } - return mi.pg.InsertWebGraphQL(sessionID, m) - case *SetPageLocation: - return mi.pg.InsertSessionReferrer(sessionID, m.Referrer) - - // IOS - case *IOSSessionStart: - return mi.pg.InsertIOSSessionStart(sessionID, m) - case *IOSSessionEnd: - return mi.pg.InsertIOSSessionEnd(sessionID, m) - case *IOSUserID: - return mi.pg.InsertIOSUserID(sessionID, m) - case *IOSUserAnonymousID: - return mi.pg.InsertIOSUserAnonymousID(sessionID, m) - case *IOSCustomEvent: - return mi.pg.InsertIOSCustomEvent(sessionID, m) - case *IOSClickEvent: - return mi.pg.InsertIOSClickEvent(sessionID, m) - case *IOSInputEvent: - return mi.pg.InsertIOSInputEvent(sessionID, m) - // Unique IOS messages - case *IOSNetworkCall: - return mi.pg.InsertIOSNetworkCall(sessionID, m) - case *IOSScreenEnter: - return mi.pg.InsertIOSScreenEnter(sessionID, m) - case *IOSCrash: - return mi.pg.InsertIOSCrash(sessionID, m) - - } - return nil // "Not implemented" -} diff --git a/ee/backend/internal/db/datasaver/methods.go 
diff --git a/ee/backend/internal/db/datasaver/methods.go b/ee/backend/internal/db/datasaver/methods.go
new file mode 100644
index 000000000..277fd8906
--- /dev/null
+++ b/ee/backend/internal/db/datasaver/methods.go
@@ -0,0 +1,83 @@
+package datasaver
+
+import (
+	"errors"
+	"log"
+
+	"openreplay/backend/pkg/db/cache"
+	"openreplay/backend/pkg/db/clickhouse"
+	"openreplay/backend/pkg/db/types"
+	"openreplay/backend/pkg/env"
+	. "openreplay/backend/pkg/messages"
+	"openreplay/backend/pkg/queue"
+)
+
+func (s *saverImpl) init() {
+	s.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING"))
+	if err := s.ch.Prepare(); err != nil {
+		log.Fatalf("can't prepare clickhouse: %s", err)
+	}
+	s.pg.Conn.SetClickHouse(s.ch)
+	if s.cfg.UseQuickwit {
+		s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true)
+	}
+}
+
+func (s *saverImpl) handleExtraMessage(msg Message) error {
+	// Send data to quickwit
+	s.sendToFTS(msg)
+
+	// Get session data
+	var (
+		session *types.Session
+		err     error
+	)
+	if msg.TypeID() == MsgSessionEnd {
+		session, err = s.pg.GetSession(msg.SessionID())
+	} else {
+		session, err = s.pg.Cache.GetSession(msg.SessionID())
+	}
+	if session == nil {
+		if err != nil && !errors.Is(err, cache.NilSessionInCacheError) {
+			log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
+		}
+		return err
+	}
+
+	// Handle message
+	switch m := msg.(type) {
+	case *SessionEnd:
+		return s.ch.InsertWebSession(session)
+	case *PerformanceTrackAggr:
+		return s.ch.InsertWebPerformanceTrackAggr(session, m)
+	case *MouseClick:
+		return s.ch.InsertWebClickEvent(session, m)
+	case *InputEvent:
+		return s.ch.InsertWebInputEvent(session, m)
+	// Unique for Web
+	case *PageEvent:
+		return s.ch.InsertWebPageEvent(session, m)
+	case *ResourceTiming:
+		return s.ch.InsertWebResourceEvent(session, m)
+	case *JSException:
+		return s.ch.InsertWebErrorEvent(session, types.WrapJSException(m))
+	case *IntegrationEvent:
+		return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
+	case *IssueEvent:
+		return s.ch.InsertIssue(session, m)
+	case *CustomEvent:
+		return s.ch.InsertCustom(session, m)
+	case *NetworkRequest:
+		project, err := s.pg.GetProject(session.ProjectID)
+		if err != nil {
+			log.Printf("can't get project: %s", err)
+		} else {
+			if err := s.ch.InsertRequest(session, m, project.SaveRequestPayloads); err != nil {
+				log.Printf("can't insert request event into clickhouse: %s", err)
+			}
+		}
+	case *GraphQL:
+		return s.ch.InsertGraphQL(session, m)
+	}
+	return nil
+}
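
handleExtraMessage reads the session from the in-memory cache for most messages but goes straight to Postgres for SessionEnd, because by that point the cached entry may already be evicted. A sketch of that fallback with hypothetical store types:

package main

import (
	"errors"
	"fmt"
)

type session struct{ ProjectID uint32 }

var errNilSessionInCache = errors.New("nil session in cache")

type store struct{ cache map[uint64]*session }

func (s *store) getFromCache(id uint64) (*session, error) {
	if sess, ok := s.cache[id]; ok {
		return sess, nil
	}
	return nil, errNilSessionInCache
}

func (s *store) getFromDB(id uint64) (*session, error) {
	return &session{ProjectID: 7}, nil // pretend Postgres always has it
}

// lookup mirrors the branch in handleExtraMessage: session-end events
// skip the cache because the entry may already be gone.
func lookup(s *store, id uint64, isSessionEnd bool) (*session, error) {
	if isSessionEnd {
		return s.getFromDB(id)
	}
	return s.getFromCache(id)
}

func main() {
	s := &store{cache: map[uint64]*session{}}
	sess, err := lookup(s, 42, true)
	fmt.Println(sess, err) // &{7} <nil>
	sess, err = lookup(s, 42, false)
	fmt.Println(sess, err) // <nil> nil session in cache
}
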
diff --git a/ee/backend/internal/db/datasaver/saver.go b/ee/backend/internal/db/datasaver/saver.go
deleted file mode 100644
index e05e502f1..000000000
--- a/ee/backend/internal/db/datasaver/saver.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package datasaver
-
-import (
-	"openreplay/backend/internal/config/db"
-	"openreplay/backend/pkg/db/cache"
-	"openreplay/backend/pkg/db/clickhouse"
-	"openreplay/backend/pkg/queue"
-	"openreplay/backend/pkg/queue/types"
-)
-
-type Saver struct {
-	pg       *cache.PGCache
-	ch       clickhouse.Connector
-	producer types.Producer
-	topic    string
-}
-
-func New(pg *cache.PGCache, cfg *db.Config) *Saver {
-	var producer types.Producer = nil
-	if cfg.UseQuickwit {
-		producer = queue.NewProducer(cfg.MessageSizeLimit, true)
-	}
-	return &Saver{pg: pg, producer: producer, topic: cfg.QuickwitTopic}
-}
diff --git a/ee/backend/internal/db/datasaver/stats.go b/ee/backend/internal/db/datasaver/stats.go
deleted file mode 100644
index 049c319bd..000000000
--- a/ee/backend/internal/db/datasaver/stats.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package datasaver
-
-import (
-	"log"
-	"openreplay/backend/pkg/db/clickhouse"
-	"openreplay/backend/pkg/db/types"
-	"openreplay/backend/pkg/env"
-	"openreplay/backend/pkg/messages"
-)
-
-func (si *Saver) InitStats() {
-	si.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING"))
-	if err := si.ch.Prepare(); err != nil {
-		log.Fatalf("Clickhouse prepare error: %v\n", err)
-	}
-	si.pg.Conn.SetClickHouse(si.ch)
-}
-
-func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error {
-	// Send data to quickwit
-	if sess, err := si.pg.Cache.GetSession(msg.SessionID()); err != nil {
-		si.SendToFTS(msg, 0)
-	} else {
-		si.SendToFTS(msg, sess.ProjectID)
-	}
-
-	switch m := msg.(type) {
-	// Web
-	case *messages.SessionEnd:
-		return si.ch.InsertWebSession(session)
-	case *messages.PerformanceTrackAggr:
-		return si.ch.InsertWebPerformanceTrackAggr(session, m)
-	case *messages.ClickEvent:
-		return si.ch.InsertWebClickEvent(session, m)
-	case *messages.InputEvent:
-		return si.ch.InsertWebInputEvent(session, m)
-	// Unique for Web
-	case *messages.PageEvent:
-		return si.ch.InsertWebPageEvent(session, m)
-	case *messages.ResourceEvent:
-		return si.ch.InsertWebResourceEvent(session, m)
-	case *messages.JSException:
-		return si.ch.InsertWebErrorEvent(session, types.WrapJSException(m))
-	case *messages.IntegrationEvent:
-		return si.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
-	}
-	return nil
-}
-
-func (si *Saver) CommitStats() error {
-	return si.ch.Commit()
-}
-
-func (si *Saver) Close() error {
-	return si.ch.Stop()
-}
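
With saver.go and stats.go gone, a single implementation owns the whole lifecycle: prepare connections in init, route messages, flush on Commit, and flush one last time on Stop. The shape of that consolidation, sketched with counters instead of a real ClickHouse connection:

package main

import "fmt"

// saver keeps the whole pipeline lifecycle in one place.
type saver struct{ buffered int }

func newSaver() *saver {
	s := &saver{}
	s.init() // prepare connections once, up front
	return s
}

func (s *saver) init() { fmt.Println("clickhouse prepared, quickwit producer ready") }

func (s *saver) handle(msg string) { s.buffered++ }

func (s *saver) Commit() error {
	fmt.Printf("flushed %d buffered rows\n", s.buffered)
	s.buffered = 0
	return nil
}

func (s *saver) Stop() error { return s.Commit() } // final flush on shutdown

func main() {
	s := newSaver()
	s.handle("PageEvent")
	s.handle("GraphQL")
	_ = s.Commit()
	_ = s.Stop()
}
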
diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go
index b872adcc2..489411550 100644
--- a/ee/backend/pkg/db/clickhouse/connector.go
+++ b/ee/backend/pkg/db/clickhouse/connector.go
@@ -21,9 +21,9 @@ type Connector interface {
 	Commit() error
 	Stop() error
 	InsertWebSession(session *types.Session) error
-	InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error
+	InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error
 	InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error
-	InsertWebClickEvent(session *types.Session, msg *messages.ClickEvent) error
+	InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error
 	InsertWebInputEvent(session *types.Session, msg *messages.InputEvent) error
 	InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error
 	InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error
@@ -147,9 +147,7 @@ func (c *connectorImpl) worker() {
 	for {
 		select {
 		case t := <-c.workerTask:
-			start := time.Now()
 			c.sendBulks(t)
-			log.Printf("ch bulks dur: %d", time.Now().Sub(start).Milliseconds())
 		case <-c.done:
 			for t := range c.workerTask {
 				c.sendBulks(t)
@@ -242,28 +240,25 @@ func (c *connectorImpl) InsertWebSession(session *types.Session) error {
 	return nil
 }
 
-func (c *connectorImpl) InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error {
-	var method interface{} = url.EnsureMethod(msg.Method)
-	if method == "" {
-		method = nil
-	}
-	resourceType := url.EnsureType(msg.Type)
+func (c *connectorImpl) InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error {
+	msgType := url.GetResourceType(msg.Initiator, msg.URL)
+	resourceType := url.EnsureType(msgType)
 	if resourceType == "" {
-		return fmt.Errorf("can't parse resource type, sess: %s, type: %s", session.SessionID, msg.Type)
+		return fmt.Errorf("can't parse resource type, sess: %d, type: %s", session.SessionID, msgType)
 	}
 	if err := c.batches["resources"].Append(
 		session.SessionID,
 		uint16(session.ProjectID),
-		msg.MessageID,
+		msg.MsgID(),
 		datetime(msg.Timestamp),
 		url.DiscardURLQuery(msg.URL),
-		msg.Type,
+		msgType,
 		nullableUint16(uint16(msg.Duration)),
 		nullableUint16(uint16(msg.TTFB)),
 		nullableUint16(uint16(msg.HeaderSize)),
 		nullableUint32(uint32(msg.EncodedBodySize)),
 		nullableUint32(uint32(msg.DecodedBodySize)),
-		msg.Success,
+		msg.Duration != 0,
 	); err != nil {
 		c.checkError("resources", err)
 		return fmt.Errorf("can't append to resources batch: %s", err)
@@ -298,14 +293,14 @@ func (c *connectorImpl) InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error {
 	return nil
 }
 
-func (c *connectorImpl) InsertWebClickEvent(session *types.Session, msg *messages.ClickEvent) error {
+func (c *connectorImpl) InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error {
 	if msg.Label == "" {
 		return nil
 	}
 	if err := c.batches["clicks"].Append(
 		session.SessionID,
 		uint16(session.ProjectID),
-		msg.MessageID,
+		msg.MsgID(),
 		datetime(msg.Timestamp),
 		msg.Label,
 		nullableUint32(uint32(msg.HesitationTime)),
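
The connector's worker, visible in the second hunk above, keeps draining queued bulks after the done signal fires. The shutdown-drain idiom in isolation; this sketch assumes the producer closes the task channel around the time it signals done, otherwise the drain loop would block:

package main

import "fmt"

// worker mirrors the connector's loop: process bulks as they arrive and,
// once shutdown is signalled, drain everything still queued before exiting.
func worker(tasks chan []string, done <-chan struct{}, finished chan<- struct{}) {
	for {
		select {
		case t, ok := <-tasks:
			if !ok { // producer closed the queue: nothing more will arrive
				close(finished)
				return
			}
			fmt.Println("sending bulk of", len(t))
		case <-done:
			for t := range tasks { // drain; ends once tasks is closed
				fmt.Println("sending bulk of", len(t))
			}
			close(finished)
			return
		}
	}
}

func main() {
	tasks := make(chan []string, 4)
	done := make(chan struct{})
	finished := make(chan struct{})
	go worker(tasks, done, finished)

	tasks <- []string{"row1", "row2"}
	tasks <- []string{"row3"}
	close(tasks) // no more bulks will be produced
	close(done)  // request shutdown; the worker drains the queue first
	<-finished
}
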
diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py
index 54f8df955..d2f684148 100644
--- a/ee/connectors/msgcodec/messages.py
+++ b/ee/connectors/msgcodec/messages.py
@@ -315,35 +315,6 @@ class InputEvent(Message):
         self.label = label
 
 
-class ClickEvent(Message):
-    __id__ = 33
-
-    def __init__(self, message_id, timestamp, hesitation_time, label, selector):
-        self.message_id = message_id
-        self.timestamp = timestamp
-        self.hesitation_time = hesitation_time
-        self.label = label
-        self.selector = selector
-
-
-class ResourceEvent(Message):
-    __id__ = 35
-
-    def __init__(self, message_id, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, type, success, method, status):
-        self.message_id = message_id
-        self.timestamp = timestamp
-        self.duration = duration
-        self.ttfb = ttfb
-        self.header_size = header_size
-        self.encoded_body_size = encoded_body_size
-        self.decoded_body_size = decoded_body_size
-        self.url = url
-        self.type = type
-        self.success = success
-        self.method = method
-        self.status = status
-
-
 class CSSInsertRule(Message):
     __id__ = 37
 
@@ -470,13 +441,6 @@ class SetNodeAttributeDict(Message):
         self.value_key = value_key
 
 
-class DOMDrop(Message):
-    __id__ = 52
-
-    def __init__(self, timestamp):
-        self.timestamp = timestamp
-
-
 class ResourceTiming(Message):
     __id__ = 53
 
diff --git a/ee/connectors/msgcodec/msgcodec.py b/ee/connectors/msgcodec/msgcodec.py
index 0ba21ea12..cd23833da 100644
--- a/ee/connectors/msgcodec/msgcodec.py
+++ b/ee/connectors/msgcodec/msgcodec.py
@@ -321,31 +321,6 @@ class MessageCodec(Codec):
                 label=self.read_string(reader)
             )
 
-        if message_id == 33:
-            return ClickEvent(
-                message_id=self.read_uint(reader),
-                timestamp=self.read_uint(reader),
-                hesitation_time=self.read_uint(reader),
-                label=self.read_string(reader),
-                selector=self.read_string(reader)
-            )
-
-        if message_id == 35:
-            return ResourceEvent(
-                message_id=self.read_uint(reader),
-                timestamp=self.read_uint(reader),
-                duration=self.read_uint(reader),
-                ttfb=self.read_uint(reader),
-                header_size=self.read_uint(reader),
-                encoded_body_size=self.read_uint(reader),
-                decoded_body_size=self.read_uint(reader),
-                url=self.read_string(reader),
-                type=self.read_string(reader),
-                success=self.read_boolean(reader),
-                method=self.read_string(reader),
-                status=self.read_uint(reader)
-            )
-
         if message_id == 37:
             return CSSInsertRule(
                 id=self.read_uint(reader),
@@ -444,11 +419,6 @@ class MessageCodec(Codec):
                 value_key=self.read_uint(reader)
             )
 
-        if message_id == 52:
-            return DOMDrop(
-                timestamp=self.read_uint(reader)
-            )
-
         if message_id == 53:
             return ResourceTiming(
                 timestamp=self.read_uint(reader),
diff --git a/mobs/messages.rb b/mobs/messages.rb
index ef36ebfa7..c4124226e 100644
--- a/mobs/messages.rb
+++ b/mobs/messages.rb
@@ -187,29 +187,6 @@ message 32, 'InputEvent', :tracker => false, :replayer => false do
   boolean 'ValueMasked'
   string 'Label'
 end
-message 33, 'ClickEvent', :tracker => false, :replayer => false do
-  uint 'MessageID'
-  uint 'Timestamp'
-  uint 'HesitationTime'
-  string 'Label'
-  string 'Selector'
-end
-## 34
-message 35, 'ResourceEvent', :tracker => false, :replayer => false do
-  uint 'MessageID'
-  uint 'Timestamp'
-  uint 'Duration'
-  uint 'TTFB'
-  uint 'HeaderSize'
-  uint 'EncodedBodySize'
-  uint 'DecodedBodySize'
-  string 'URL'
-  string 'Type'
-  boolean 'Success'
-  string 'Method'
-  uint 'Status'
-end
-#36
 # DEPRECATED since 4.0.2 in favor of AdoptedSSInsertRule + AdoptedSSAddOwner
 message 37, 'CSSInsertRule' do
@@ -288,12 +265,6 @@ message 51, "SetNodeAttributeDict" do
   uint 'NameKey'
   uint 'ValueKey'
 end
-
-## 50,51
-# Doesn't work properly. TODO: Make proper detections in tracker
-message 52, 'DOMDrop', :tracker => false, :replayer => false do
-  uint 'Timestamp'
-end
 message 53, 'ResourceTiming', :replayer => :devtools do
   uint 'Timestamp'
   uint 'Duration'

From 73d6fc9dab79207a80230dd4e06146e3fac14f60 Mon Sep 17 00:00:00 2001
From: rjshrjndrn
Date: Thu, 9 Mar 2023 10:36:46 +0100
Subject: [PATCH 215/218] chore(helm): chalice updating liveness probe

Signed-off-by: rjshrjndrn
---
 scripts/helmcharts/openreplay/charts/chalice/values.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/scripts/helmcharts/openreplay/charts/chalice/values.yaml b/scripts/helmcharts/openreplay/charts/chalice/values.yaml
index 1a1d496ed..383f00e06 100644
--- a/scripts/helmcharts/openreplay/charts/chalice/values.yaml
+++ b/scripts/helmcharts/openreplay/charts/chalice/values.yaml
@@ -121,11 +121,11 @@ affinity: {}
 healthCheck:
   livenessProbe:
     httpGet:
-      path: /
+      path: /signup
       port: 8000
-    initialDelaySeconds: 100
-    periodSeconds: 15
-    timeoutSeconds: 10
+    initialDelaySeconds: 120
+    periodSeconds: 30
+    timeoutSeconds: 15
 
 
 pvc:

From 64cea03ce05bb9ab26a642e19b3b797d23e6709f Mon Sep 17 00:00:00 2001
From: nick-delirium
Date: Thu, 9 Mar 2023 10:50:17 +0100
Subject: [PATCH 216/218] fix(player): fix abs time tooltip tracking

---
 .../Session/Player/ClickMapRenderer/ThinPlayer.tsx        | 6 +++++-
 .../app/components/Session_/Player/Controls/Timeline.tsx  | 9 +++++++--
 .../Session_/Player/Controls/components/TimeTooltip.tsx   | 2 +-
 tracker/tracker-assist/package.json                       | 2 +-
 4 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx b/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx
index 886bb848c..1855a5046 100644
--- a/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx
+++ b/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx
@@ -25,7 +25,11 @@ function WebPlayer(props: any) {
     );
     setContextValue({ player: WebPlayerInst, store: PlayerStore });
 
-    return () => WebPlayerInst.clean();
+    return () => {
+      WebPlayerInst.clean();
+      // @ts-ignore
+      setContextValue(defaultContextValue);
+    }
   }, [session.sessionId]);
 
   const isPlayerReady = contextValue.store?.get().ready
diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.tsx b/frontend/app/components/Session_/Player/Controls/Timeline.tsx
index 7cde52a96..678982aa9 100644
--- a/frontend/app/components/Session_/Player/Controls/Timeline.tsx
+++ b/frontend/app/components/Session_/Player/Controls/Timeline.tsx
@@ -81,7 +81,12 @@ function Timeline(props: IProps) {
   };
 
   const showTimeTooltip = (e: React.MouseEvent) => {
-    if (e.target !== progressRef.current && e.target !== timelineRef.current) {
+    if (
+      e.target !== progressRef.current
+      && e.target !== timelineRef.current
+      // @ts-ignore black magic
+      && !progressRef.current.contains(e.target)
+    ) {
       return props.tooltipVisible && hideTimeTooltip();
     }
@@ -91,7 +96,7 @@ function Timeline(props: IProps) {
     const timeLineTooltip = {
      time: Duration.fromMillis(time).toFormat(`mm:ss`),
       timeStr,
-      offset: e.nativeEvent.offsetX,
+      offset: e.nativeEvent.pageX,
       isVisible: true,
     };
 
diff --git a/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx
index e47593b97..5f746f336 100644
--- a/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx
+++ b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx
@@ -22,7 +22,7 @@ function TimeTooltip({
       className={stl.timeTooltip}
       style={{
         top: 0,
-        left: offset,
+        left: `calc(${offset}px - 0.5rem)`,
         display: isVisible ? 'block' : 'none',
         transform: 'translate(-50%, -110%)',
         whiteSpace: 'nowrap',
diff --git a/tracker/tracker-assist/package.json b/tracker/tracker-assist/package.json
index 4277e2e36..4c8c98d53 100644
--- a/tracker/tracker-assist/package.json
+++ b/tracker/tracker-assist/package.json
@@ -31,7 +31,7 @@
     "socket.io-client": "^4.4.1"
   },
   "peerDependencies": {
-    "@openreplay/tracker": ">=3.6.0"
+    "@openreplay/tracker": ">=5.0.0"
   },
   "devDependencies": {
     "@openreplay/tracker": "file:../tracker",

From 4c198749978b5187400bdc39f4ebfbc03a3d12cb Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Thu, 9 Mar 2023 12:51:22 +0100
Subject: [PATCH 217/218] feat(chalice): clickmap fixed click_rage count

---
 api/chalicelib/core/heatmaps.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py
index 465e8ce92..528318cde 100644
--- a/api/chalicelib/core/heatmaps.py
+++ b/api/chalicelib/core/heatmaps.py
@@ -47,7 +47,7 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
         for j, sf in enumerate(f.filters):
             f_k = f"issue_svalue{i}{j}"
             args = {**args, **sh.multi_values(sf.value, value_key=f_k)}
-            if sf.type == schemas.IssueFilterType._on_selector and len(sf.value) > 0:
+            if sf.type == schemas.IssueFilterType._selector and len(sf.value) > 0:
                 constraints.append(sh.multi_conditions(f"clicks.selector = %({f_k})s", sf.value,
                                                        value_key=f_k))
 
@@ -55,9 +55,8 @@
         constraints.append("""(issues.session_id IS NULL
                                OR (issues.timestamp >= %(startDate)s
                                    AND issues.timestamp <= %(endDate)s
-                                   AND mis.project_id = %(project_id)s
-                                   AND mis.type = 'click_rage'))""")
-        q_count += ",COALESCE(bool_or(mis.issue_id IS NOT NULL), FALSE) AS click_rage"
+                                   AND mis.project_id = %(project_id)s))""")
+        q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage"
         query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
                          LEFT JOIN issues AS mis USING (issue_id)"""
         with pg_client.PostgresClient() as cur:
From fc8604db92c654f2a2819a0c8fd4ffe374d826dc Mon Sep 17 00:00:00 2001
From: rjshrjndrn
Date: Thu, 9 Mar 2023 13:01:39 +0100
Subject: [PATCH 218/218] chore(build): Skip confirmation for signing

Signed-off-by: rjshrjndrn
---
 scripts/helmcharts/build_deploy.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/scripts/helmcharts/build_deploy.sh b/scripts/helmcharts/build_deploy.sh
index c922878d4..f37f26c94 100644
--- a/scripts/helmcharts/build_deploy.sh
+++ b/scripts/helmcharts/build_deploy.sh
@@ -10,6 +10,7 @@ docker rmi alpine || true
 
 # Signing image
 # cosign sign --key awskms:///alias/openreplay-container-sign image_url:tag
+export COSIGN_YES=true # Skip confirmation
 export SIGN_IMAGE=1
 export PUSH_IMAGE=1
 export AWS_DEFAULT_REGION="eu-central-1"
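
For context, COSIGN_YES=true is cosign's environment switch for answering its confirmation prompt non-interactively, which is what CI needs. A hedged sketch of driving the same signing step from Go; it assumes a cosign binary on PATH and reuses the KMS key alias from the comment above, and the image name is made up:

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func signImage(image string) error {
	cmd := exec.Command("cosign", "sign", "--key",
		"awskms:///alias/openreplay-container-sign", image)
	// COSIGN_YES suppresses the interactive confirmation, as in build_deploy.sh.
	cmd.Env = append(os.Environ(), "COSIGN_YES=true")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}

func main() {
	// hypothetical image reference, for illustration only
	if err := signImage("registry.example.com/assist:v1.10.0"); err != nil {
		fmt.Fprintln(os.Stderr, "sign failed:", err)
		os.Exit(1)
	}
}
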