diff --git a/.github/workflows/alerts-ee.yaml b/.github/workflows/alerts-ee.yaml index 10482a7cb..4c1d3b6c4 100644 --- a/.github/workflows/alerts-ee.yaml +++ b/.github/workflows/alerts-ee.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "ee/api/**" - "api/**" diff --git a/.github/workflows/alerts.yaml b/.github/workflows/alerts.yaml index 539cc5e65..a24f2b855 100644 --- a/.github/workflows/alerts.yaml +++ b/.github/workflows/alerts.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "api/**" - "!api/.gitignore" diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index b2a31f276..f9a1730f1 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "ee/api/**" - "api/**" diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 26d59ff87..8e2f7fa7b 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -8,7 +8,8 @@ on: default: 'false' push: branches: - - api-v1.10.0 + - dev + - api-* paths: - "api/**" - "!api/.gitignore" diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index 76dcc4a2d..44fcb5650 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -4,11 +4,12 @@ on: push: branches: - dev + - api-* paths: - - "ee/utilities/**" - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "ee/assist/**" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist EE @@ -43,7 +44,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh ee [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ @@ -100,9 +101,9 @@ jobs: cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,assist,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index 65ca0348c..37582d7d0 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -4,10 +4,11 @@ on: push: branches: - dev + - api-* paths: - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist @@ -42,7 +43,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ @@ -99,9 +100,9 @@ jobs: cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv 
openreplay/charts/{ingress-nginx,assist,quickwit} /tmp
           rm -rf openreplay/charts/*
-          mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
+          mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
         env:
           DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
diff --git a/.github/workflows/crons-ee.yaml b/.github/workflows/crons-ee.yaml
index 762dae33e..77c098e4e 100644
--- a/.github/workflows/crons-ee.yaml
+++ b/.github/workflows/crons-ee.yaml
@@ -8,7 +8,8 @@ on:
       default: 'false'
   push:
     branches:
-      - api-v1.10.0
+      - dev
+      - api-*
     paths:
       - "ee/api/**"
       - "api/**"
diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml
index 5db7436da..ce014a45f 100644
--- a/.github/workflows/peers-ee.yaml
+++ b/.github/workflows/peers-ee.yaml
@@ -1,16 +1,22 @@
 # This action will push the peers changes to aws
 on:
   workflow_dispatch:
+    inputs:
+      skip_security_checks:
+        description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
+        required: false
+        default: 'false'
   push:
     branches:
       - dev
+      - api-*
     paths:
       - "ee/peers/**"
       - "peers/**"
       - "!peers/.gitignore"
       - "!peers/*-dev.sh"
 
-name: Build and Deploy Peers
+name: Build and Deploy Peers EE
 
 jobs:
   deploy:
@@ -35,30 +41,98 @@ jobs:
           kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
         id: setcontext
 
-      - name: Building and Pushing api image
+      # Caching docker images
+      - uses: satackey/action-docker-layer-caching@v0.0.11
+        # Ignore the failure of a step and avoid terminating the job.
+        continue-on-error: true
+
+
+      - name: Building and Pushing peers image
         id: build-image
         env:
           DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
           ENVIRONMENT: staging
         run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
           cd peers
-          PUSH_IMAGE=1 bash build.sh ee
+          PUSH_IMAGE=0 bash -x ./build.sh ee
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("peers")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("peers")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              # We have to strip off the -ee suffix, as helm will append it.
+              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+          EOF
+          done
+
       - name: Deploy to kubernetes
         run: |
           cd scripts/helmcharts/
-          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app peers
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/peers/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
         env:
           DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
           IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
           ENVIRONMENT: staging
+      - name: Alert slack
+        if: ${{ failure() }}
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_CHANNEL: ee
+          SLACK_TITLE: "Failed ${{ github.workflow }}"
+          SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
+          SLACK_USERNAME: "OR Bot"
+          SLACK_MESSAGE: 'Build failed :bomb:'
+
     # - name: Debug Job
     #   if: ${{ failure() }}
     #   uses: mxschmitt/action-tmate@v3
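
A note on the "Creating old image input" step above: it snapshots the image tags currently running in the cluster into /tmp/image_override.yaml, so the later helm template render keeps every other service on its deployed tag and only the service being built moves forward. Below is a minimal Python sketch of the same transformation; the registry host and the build_image_override helper are illustrative, not part of the workflow.

```python
# Hedged sketch of the "Creating old image input" step, assuming image refs
# shaped like "registry.example.com/foss/chalice:v1.10.0-ee" (hypothetical host).
def build_image_override(image_refs: list[str], strip_ee: bool = True) -> str:
    """Render a helm values override mapping each chart name to its current tag."""
    seen = {}
    for ref in image_refs:
        name_tag = ref.rsplit("/", 1)[-1]   # "chalice:v1.10.0-ee"
        name, _, tag = name_tag.partition(":")
        if strip_ee:
            # mirrors `cut -d '-' -f 1`: helm re-appends -ee for EE builds
            tag = tag.split("-")[0]
        seen[name] = tag
    lines = []
    for name, tag in sorted(seen.items()):
        lines += [f"{name}:", "  image:", f"    tag: {tag}"]
    return "\n".join(lines)

if __name__ == "__main__":
    refs = ["rg.example.com/foss/chalice:v1.10.0-ee",
            "rg.example.com/foss/peers:v1.10.0-ee"]
    print(build_image_override(refs))
```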
diff --git a/.github/workflows/peers.yaml b/.github/workflows/peers.yaml
index 7b2a715d8..ef564ec65 100644
--- a/.github/workflows/peers.yaml
+++ b/.github/workflows/peers.yaml
@@ -1,9 +1,15 @@
 # This action will push the peers changes to aws
 on:
   workflow_dispatch:
+    inputs:
+      skip_security_checks:
+        description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
+        required: false
+        default: 'false'
   push:
     branches:
       - dev
+      - api-*
     paths:
       - "peers/**"
       - "!peers/.gitignore"
@@ -34,30 +40,96 @@ jobs:
           kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
         id: setcontext
 
-      - name: Building and Pushing api image
+      # Caching docker images
+      - uses: satackey/action-docker-layer-caching@v0.0.11
+        # Ignore the failure of a step and avoid terminating the job.
+        continue-on-error: true
+
+
+      - name: Building and Pushing peers image
         id: build-image
         env:
           DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
           IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
           ENVIRONMENT: staging
         run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
           cd peers
-          PUSH_IMAGE=1 bash build.sh
+          PUSH_IMAGE=0 bash -x ./build.sh
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("peers")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("peers")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              tag: ${image_array[1]}
+          EOF
+          done
+
       - name: Deploy to kubernetes
         run: |
           cd scripts/helmcharts/
+
+          ## Update secrets
           sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
-          sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-          sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml
-          sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
-          sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-          sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-          bash kube-install.sh --app peers
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/peers/{n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
         env:
           DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
           IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
           ENVIRONMENT: staging
+      - name: Alert slack
+        if: ${{ failure() }}
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_CHANNEL: foss
+          SLACK_TITLE: "Failed ${{ github.workflow }}"
+          SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
+          SLACK_USERNAME: "OR Bot"
+          SLACK_MESSAGE: 'Build failed :bomb:'
+
     # - name: Debug Job
     #   if: ${{ failure() }}
     #   uses: mxschmitt/action-tmate@v3
@@ -65,4 +137,4 @@ jobs:
 #          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
 #          IMAGE_TAG: ${{ github.sha }}
 #          ENVIRONMENT: staging
-  #
+
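
The only functional difference between the EE and FOSS "Update changed image tag" sed lines is the number of `n` commands: the EE override entries carry an extra comment line between `image:` and `tag:`, so the EE workflow skips three lines after the `/peers/` match while this FOSS one skips two. A hedged Python sketch of that match-skip-replace idiom (the retag helper and sample text are illustrative):

```python
# Sketch of the sed idiom `/peers/{n;n;n;s/.*/    tag: .../}`: find the line
# matching a pattern, skip `offset` lines, replace the line that follows.
# offset=3 fits the EE override (extra comment line); offset=2 fits FOSS.
import re

def retag(text: str, pattern: str, new_tag: str, offset: int) -> str:
    lines = text.splitlines()
    for i, line in enumerate(lines):
        if re.search(pattern, line):
            # assumes the tag line actually sits `offset` lines below the match
            lines[i + offset] = f"    tag: {new_tag}"
            break  # first match only; sed would rewrite every match
    return "\n".join(lines)

override = "peers:\n  image:\n    # comment line (EE only)\n    tag: v1.10.0"
print(retag(override, r"peers", "dev_abc123-ee", offset=3))
```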
diff --git a/.github/workflows/sourcemaps-reader-ee.yaml b/.github/workflows/sourcemaps-reader-ee.yaml
new file mode 100644
index 000000000..0bee8ba4e
--- /dev/null
+++ b/.github/workflows/sourcemaps-reader-ee.yaml
@@ -0,0 +1,142 @@
+# This action will push the sourcemapreader changes to aws
+on:
+  workflow_dispatch:
+    inputs:
+      skip_security_checks:
+        description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
+        required: false
+        default: 'false'
+  push:
+    branches:
+      - dev
+      - api-*
+    paths:
+      - "sourcemap-reader/**"
+      - "!sourcemap-reader/.gitignore"
+      - "!sourcemap-reader/*-dev.sh"
+
+name: Build and Deploy sourcemap-reader
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # We need to diff with old commit
+          # to see which workers got changed.
+          fetch-depth: 2
+
+      - name: Docker login
+        run: |
+          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
+
+      - uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
+        id: setcontext
+
+      # Caching docker images
+      - uses: satackey/action-docker-layer-caching@v0.0.11
+        # Ignore the failure of a step and avoid terminating the job.
+        continue-on-error: true
+
+
+      - name: Building and Pushing sourcemaps-reader image
+        id: build-image
+        env:
+          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
+          ENVIRONMENT: staging
+        run: |
+          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
+          cd sourcemap-reader
+          PUSH_IMAGE=0 bash -x ./build.sh
+          [[ "x$skip_security_checks" == "xtrue" ]] || {
+            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
+            images=("sourcemaps-reader")
+            for image in ${images[*]};do
+              ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
+            done
+            err_code=$?
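+            [[ $err_code -ne 0 ]] && {
+              exit $err_code
+            }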
+          } && {
+            echo "Skipping Security Checks"
+          }
+          images=("sourcemaps-reader")
+          for image in ${images[*]};do
+            docker push $DOCKER_REPO/$image:$IMAGE_TAG
+          done
+      - name: Creating old image input
+        run: |
+          #
+          # Create yaml with existing image tags
+          #
+          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+
+          echo > /tmp/image_override.yaml
+
+          for line in `cat /tmp/image_tag.txt`;
+          do
+            image_array=($(echo "$line" | tr ':' '\n'))
+            cat <<EOF >> /tmp/image_override.yaml
+          ${image_array[0]}:
+            image:
+              tag: ${image_array[1]}
+          EOF
+          done
+
+      - name: Deploy to kubernetes
+        run: |
+          cd scripts/helmcharts/
+
+          ## Update secrets
+          sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
+          sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
+          sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+          sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
+          sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
+          sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
+          sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
+
+          # Update changed image tag
+          sed -i "/sourcemaps-reader/{n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+          sed -i "s/sourcemaps-reader/sourcemapreader/g" /tmp/image_override.yaml
+
+          cat /tmp/image_override.yaml
+          # Deploy command
+          mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit} /tmp
+          rm -rf openreplay/charts/*
+          mv /tmp/{ingress-nginx,sourcemapreader,quickwit} openreplay/charts/
+          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
+        env:
+          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
+          ENVIRONMENT: staging
+
+      - name: Alert slack
+        if: ${{ failure() }}
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_CHANNEL: foss
+          SLACK_TITLE: "Failed ${{ github.workflow }}"
+          SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
+          SLACK_USERNAME: "OR Bot"
+          SLACK_MESSAGE: 'Build failed :bomb:'
+
+  # - name: Debug Job
+  #   if: ${{ failure() }}
+  #   uses: mxschmitt/action-tmate@v3
+  #   env:
+  #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
+  #     IMAGE_TAG: ${{ github.sha }}
+  #     ENVIRONMENT: staging
+
diff --git a/.github/workflows/sourcemaps-reader.yaml b/.github/workflows/sourcemaps-reader.yaml
index 095a70784..bbc7ae887 100644
--- a/.github/workflows/sourcemaps-reader.yaml
+++ b/.github/workflows/sourcemaps-reader.yaml
@@ -1,9 +1,15 @@
-# This action will push the chalice changes to aws
+# This action will push the sourcemapreader changes to aws
 on:
   workflow_dispatch:
+    inputs:
+      skip_security_checks:
+        description: 'Skip Security checks if there is an unfixable vuln or error. 
Value: true/false' + required: false + default: 'false' push: branches: - dev + - api-* paths: - "sourcemap-reader/**" - "!sourcemap-reader/.gitignore" @@ -47,8 +53,26 @@ jobs: IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} cd sourcemap-reader - PUSH_IMAGE=1 bash build.sh + PUSH_IMAGE=0 bash -x ./build.sh + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("sourcemaps-reader") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("sourcemaps-reader") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done - name: Creating old image input run: | # @@ -82,19 +106,31 @@ jobs: sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml # Update changed image tag - sed -i "/chalice/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + sed -i "/sourcemaps-reader/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + sed -i "s/sourcemaps-reader/sourcemapreader/g" /tmp/image_override.yaml cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,sourcemapreader,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: foss + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' + # - name: Debug Job # if: ${{ failure() }} # uses: mxschmitt/action-tmate@v3 @@ -102,4 +138,4 @@ jobs: # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} # IMAGE_TAG: ${{ github.sha }} # ENVIRONMENT: staging - # + diff --git a/api/app.py b/api/app.py index 43c3b7945..50cd7342f 100644 --- a/api/app.py +++ b/api/app.py @@ -1,4 +1,5 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config @@ -12,9 +13,42 @@ from chalicelib.utils import pg_client from routers import core, core_dynamic from routers.crons import core_crons from routers.crons import core_dynamic_crons -from routers.subs import insights, metrics, v1_api +from routers.subs import insights, metrics, v1_api, health -app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup + logging.info(">>>>> 
starting up <<<<<") + ap_logger = logging.getLogger('apscheduler') + ap_logger.setLevel(loglevel) + + app.schedule = AsyncIOScheduler() + await pg_client.init() + app.schedule.start() + + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + # App listening + yield + + # Shutdown + logging.info(">>>>> shutting down <<<<<") + app.schedule.shutdown(wait=False) + await pg_client.terminate() + + +app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""), + lifespan=lifespan) app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -51,39 +85,13 @@ app.include_router(core_dynamic.app_apikey) app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) +app.include_router(health.public_app) +app.include_router(health.app) +app.include_router(health.app_apikey) -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): - logging.info(">>>>> starting up <<<<<") - await pg_client.init() - app.schedule.start() - - for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: - app.schedule.add_job(id=job["func"].__name__, **job) - - ap_logger.info(">Scheduled jobs:") - for job in app.schedule.get_jobs(): - ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) - - -@app.on_event("shutdown") -async def shutdown(): - logging.info(">>>>> shutting down <<<<<") - app.schedule.shutdown(wait=False) - await pg_client.terminate() - - -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) +# @app.get('/private/shutdown', tags=["private"]) +# async def stop_server(): +# logging.info("Requested shutdown") +# await shutdown() +# import os, signal +# os.kill(1, signal.SIGTERM) diff --git a/api/app_alerts.py b/api/app_alerts.py index 111bad2a1..02147ef23 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -1,33 +1,17 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config from fastapi import FastAPI -from chalicelib.utils import pg_client from chalicelib.core import alerts_processor - -app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) -logging.info("============= ALERTS =============") +from chalicelib.utils import pg_client -@app.get("/") -async def root(): - return {"status": "Running"} - - -app.schedule = AsyncIOScheduler() - -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup logging.info(">>>>> starting up <<<<<") await pg_client.init() app.schedule.start() @@ -39,24 +23,44 @@ async 
def startup():
     for job in app.schedule.get_jobs():
         ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
 
+    # App listening
+    yield
 
-@app.on_event("shutdown")
-async def shutdown():
+    # Shutdown
     logging.info(">>>>> shutting down <<<<<")
     app.schedule.shutdown(wait=False)
     await pg_client.terminate()
 
-@app.get('/private/shutdown', tags=["private"])
-async def stop_server():
-    logging.info("Requested shutdown")
-    await shutdown()
-    import os, signal
-    os.kill(1, signal.SIGTERM)
+app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""),
+              lifespan=lifespan)
+logging.info("============= ALERTS =============")
+
+
+@app.get("/")
+async def root():
+    return {"status": "Running"}
+
+
+@app.get("/health")
+async def get_health_status():
+    return {"data": {
+        "health": True,
+        "details": {"version": config("version_number", default="unknown")}
+    }}
+
+
+app.schedule = AsyncIOScheduler()
+
+loglevel = config("LOGLEVEL", default=logging.INFO)
+print(f">Loglevel set to: {loglevel}")
+logging.basicConfig(level=loglevel)
+ap_logger = logging.getLogger('apscheduler')
+ap_logger.setLevel(loglevel)
+app.schedule = AsyncIOScheduler()
+
 if config("LOCAL_DEV", default=False, cast=bool):
-    @app.get('/private/trigger', tags=["private"])
+    @app.get('/trigger', tags=["private"])
     async def trigger_main_cron():
         logging.info("Triggering main cron")
         alerts_processor.process()
diff --git a/api/build.sh b/api/build.sh
index 946dbf3b6..a66a734d0 100644
--- a/api/build.sh
+++ b/api/build.sh
@@ -30,7 +30,7 @@ check_prereq() {
 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
     image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-    [[ $ee == "true" ]] && {
+    [[ $ee == "true" ]] && {
         image_tag="${image_tag}-ee"
     }
 }
@@ -78,12 +78,6 @@ function build_api(){
 check_prereq
 build_api $environment
 echo buil_complete
-#IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_alerts.sh $1
-#
-#[[ $environment == "ee" ]] && {
-#    cp ../ee/api/build_crons.sh .
-#    IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_crons.sh $1
-#    exit_err $?
-#    rm build_crons.sh
-#} || true
-[[ $PATCH -eq 1 ]] && update_helm_release chalice
+if [[ $PATCH -eq 1 ]]; then
+  update_helm_release chalice
+fi
\ No newline at end of file
diff --git a/api/build_alerts.sh b/api/build_alerts.sh
index 5047780fe..f8b1929c6 100644
--- a/api/build_alerts.sh
+++ b/api/build_alerts.sh
@@ -8,7 +8,7 @@
 # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh
 git_sha=$(git rev-parse --short HEAD)
-image_tag=${IMAGE_TAG:-$git_sha}
+image_tag=${IMAGE_TAG:-$git_sha}
 envarg="default-foss"
 check_prereq() {
     which docker || {
@@ -21,7 +21,7 @@ check_prereq() {
 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
     image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. 
'{$NF += 1 ; print}')" - [[ $ee == "true" ]] && { + [[ $ee == "true" ]] && { image_tag="${image_tag}-ee" } } @@ -68,4 +68,6 @@ function build_alerts(){ check_prereq build_alerts $1 -[[ $PATCH -eq 1 ]] && update_helm_release alerts +if [[ $PATCH -eq 1 ]]; then + update_helm_release alerts +fi diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index 3c8b00c54..dfa86ed75 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -116,7 +116,7 @@ def process_notifications(data): BATCH_SIZE = 200 for t in full.keys(): for i in range(0, len(full[t]), BATCH_SIZE): - notifications_list = full[t][i:i + BATCH_SIZE] + notifications_list = full[t][i:min(i + BATCH_SIZE, len(full[t]))] if notifications_list is None or len(notifications_list) == 0: break diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 288d8a9b7..fcc845b7f 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -4,8 +4,7 @@ from os.path import exists as path_exists, getsize import jwt import requests from decouple import config -from starlette import status -from fastapi import HTTPException +from fastapi import HTTPException, status import schemas from chalicelib.core import projects diff --git a/api/chalicelib/core/collaboration_msteams.py b/api/chalicelib/core/collaboration_msteams.py index eb60fd653..3b8b2a05e 100644 --- a/api/chalicelib/core/collaboration_msteams.py +++ b/api/chalicelib/core/collaboration_msteams.py @@ -2,8 +2,7 @@ import json import requests from decouple import config -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status import schemas from chalicelib.core import webhook diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py index 1879b3c2d..707e28c59 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaboration_slack.py @@ -1,9 +1,8 @@ -import requests -from decouple import config from datetime import datetime -from fastapi import HTTPException -from starlette import status +import requests +from decouple import config +from fastapi import HTTPException, status import schemas from chalicelib.core import webhook diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index c40316067..9ee84b018 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -2,8 +2,7 @@ import json from typing import Union from decouple import config -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status import schemas from chalicelib.core import sessions, funnels, errors, issues, metrics, click_maps, sessions_mobs diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py new file mode 100644 index 000000000..76fcf35ca --- /dev/null +++ b/api/chalicelib/core/health.py @@ -0,0 +1,162 @@ +from urllib.parse import urlparse + +import redis +import requests +from decouple import config + +from chalicelib.utils import pg_client + +HEALTH_ENDPOINTS = { + "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/health", + "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", + "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", + "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", + "ender": 
"http://ender-openreplay.app.svc.cluster.local:8888/metrics", + "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", + "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", + "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", + "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health", + "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemaps-reader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", +} + + +def __check_database_pg(): + fail_response = { + "health": False, + "details": { + "errors": ["Postgres health-check failed"] + } + } + with pg_client.PostgresClient() as cur: + try: + cur.execute("SHOW server_version;") + server_version = cur.fetchone() + except Exception as e: + print("!! health failed: postgres not responding") + print(str(e)) + return fail_response + try: + cur.execute("SELECT openreplay_version() AS version;") + schema_version = cur.fetchone() + except Exception as e: + print("!! health failed: openreplay_version not defined") + print(str(e)) + return fail_response + return { + "health": True, + "details": { + # "version": server_version["server_version"], + # "schema": schema_version["version"] + } + } + + +def __not_supported(): + return {"errors": ["not supported"]} + + +def __always_healthy(): + return { + "health": True, + "details": {} + } + + +def __check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! issue with the {service_name}-health code:{results.status_code}") + print(results.text) + # fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!! Timeout getting {service_name}-health") + # fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print(f"!! Issue getting {service_name}-health response") + print(str(e)) + try: + print(results.text) + # fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + # fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def __check_redis(): + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("REDIS_STRING", default=None) is None: + # fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars") + return fail_response + + try: + u = urlparse(config("REDIS_STRING")) + r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) + r.ping() + except Exception as e: + print("!! 
Issue getting redis-health response") + print(str(e)) + # fail_response["details"]["errors"].append(str(e)) + return fail_response + + return { + "health": True, + "details": { + # "version": r.execute_command('INFO')['redis_version'] + } + } + + +def get_health(): + health_map = { + "databases": { + "postgres": __check_database_pg + }, + "ingestionPipeline": { + "redis": __check_redis + }, + "backendServices": { + "alerts": __check_be_service("alerts"), + "assets": __check_be_service("assets"), + "assist": __check_be_service("assist"), + "chalice": __always_healthy, + "db": __check_be_service("db"), + "ender": __check_be_service("ender"), + "frontend": __always_healthy, + "heuristics": __check_be_service("heuristics"), + "http": __check_be_service("http"), + "ingress-nginx": __always_healthy, + "integrations": __check_be_service("integrations"), + "peers": __check_be_service("peers"), + "sink": __check_be_service("sink"), + "sourcemaps-reader": __check_be_service("sourcemaps-reader"), + "storage": __check_be_service("storage") + } + } + for parent_key in health_map.keys(): + for element_key in health_map[parent_key]: + health_map[parent_key][element_key] = health_map[parent_key][element_key]() + return health_map diff --git a/api/chalicelib/core/metadata.py b/api/chalicelib/core/metadata.py index eba0d7a22..909ecff23 100644 --- a/api/chalicelib/core/metadata.py +++ b/api/chalicelib/core/metadata.py @@ -1,8 +1,7 @@ import re from typing import Optional -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status from chalicelib.core import projects from chalicelib.utils import pg_client diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index 24d1e01f7..865a4a73c 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -1,8 +1,7 @@ import json from typing import Optional -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status import schemas from chalicelib.core import users @@ -54,6 +53,7 @@ def __create(tenant_id, name): def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False): + stack_integrations = False with pg_client.PostgresClient() as cur: extra_projection = "" extra_join = "" diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index c95bed903..8f98aac83 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,10 +1,7 @@ from typing import List import schemas -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -33,89 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - 
extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, 
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 00228b31f..d3bf5e9b4 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -1,5 +1,4 @@ import schemas -from chalicelib.core import sessions from chalicelib.utils import pg_client @@ -8,11 +7,14 @@ def add_favorite_session(context: schemas.CurrentContext, project_id, session_id cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(session_id)s);""", + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): @@ -21,11 +23,14 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(session_id)s;""", + AND session_id = %(session_id)s + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas.CurrentContext, project_id, session_id): diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..94e3cc504 --- /dev/null +++ b/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,186 @@ +import schemas +from chalicelib.core import events, metadata, events_ios, \ + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM 
public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata 
else ''}
+                FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
+                WHERE s.project_id = %(project_id)s
+                    AND s.session_id = %(session_id)s;""",
+            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
+        )
+        # print("===============")
+        # print(query)
+        cur.execute(query=query)
+
+        data = cur.fetchone()
+        if data is not None:
+            data = helper.dict_to_camel_case(data)
+            if full_data:
+                if data["platform"] == 'ios':
+                    data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
+                else:
+                    data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
+                    data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
+                    data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id)
+
+            data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
+            data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
+                                                   project_key=data["projectKey"])
+            data["inDB"] = True
+            return data
+        elif live:
+            return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
+        else:
+            return None
+
+
+def get_events(project_id, session_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            f"""SELECT session_id, platform, start_ts, duration
+                FROM public.sessions AS s
+                WHERE s.project_id = %(project_id)s
+                    AND s.session_id = %(session_id)s;""",
+            {"project_id": project_id, "session_id": session_id}
+        )
+        # print("===============")
+        # print(query)
+        cur.execute(query=query)
+
+        s_data = cur.fetchone()
+        if s_data is not None:
+            s_data = helper.dict_to_camel_case(s_data)
+            data = {}
+            if s_data["platform"] == 'ios':
+                data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
+                for e in data['events']:
+                    if e["type"].endswith("_IOS"):
+                        e["type"] = e["type"][:-len("_IOS")]
+                data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
+                data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
+                                                                         session_id=session_id)
+            else:
+                data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
+                                                          group_clickrage=True)
+                all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
+                data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
+                # to keep only the first stack
+                # limit the number of errors to reduce the response-body size
+                data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
+                                  if e['source'] == "js_exception"][:500]
+                data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
+                                                                      session_id=session_id)
+                data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
+                                                                start_ts=s_data["startTs"],
+                                                                duration=s_data["duration"])
+
+            data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
+            return data
+        else:
+            return None
diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py
index 3a56f9186..ec4d99b3b 100644
--- a/api/chalicelib/core/signup.py
+++ b/api/chalicelib/core/signup.py
@@ -8,7 +8,7 @@ from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
 
 
-def create_step1(data: schemas.UserSignupSchema):
+def create_tenant(data: schemas.UserSignupSchema):
     print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC")
     errors = []
 
     if tenants.tenants_exists():
diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py
index 5479178d8..4d95ae491 100644
--- a/api/chalicelib/core/tenants.py
+++ b/api/chalicelib/core/tenants.py
@@ -68,7 +68,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema):
     return edit_client(tenant_id=tenant_id, changes=changes)
 
 
-def tenants_exists():
-    with pg_client.PostgresClient() as cur:
+def tenants_exists(use_pool=True):
+    with pg_client.PostgresClient(use_pool=use_pool) as cur:
         cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)")
         return cur.fetchone()["exists"]
diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py
index d0ed97d08..fb0906b9c 100644
--- a/api/chalicelib/core/webhook.py
+++ b/api/chalicelib/core/webhook.py
@@ -2,8 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from fastapi import HTTPException
-from starlette import status
+from fastapi import HTTPException, status
 
 import schemas
 from chalicelib.utils import pg_client, helper
diff --git a/api/chalicelib/utils/github_client_v3.py b/api/chalicelib/utils/github_client_v3.py
index 650aeb4fe..6f2d093c6 100644
--- a/api/chalicelib/utils/github_client_v3.py
+++ b/api/chalicelib/utils/github_client_v3.py
@@ -1,8 +1,7 @@
-import requests
 from datetime import datetime
 
-from fastapi import HTTPException
-from starlette import status
+import requests
+from fastapi import HTTPException, status
 
 
 class github_formatters:
diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py
index 4cd6fe566..b55e11cfe 100644
--- a/api/chalicelib/utils/jira_client.py
+++ b/api/chalicelib/utils/jira_client.py
@@ -2,11 +2,10 @@ import time
 from datetime import datetime
 
 import requests
+from fastapi import HTTPException, status
 from jira import JIRA
 from jira.exceptions import JIRAError
 from requests.auth import HTTPBasicAuth
-from starlette import status
-from fastapi import HTTPException
 
 fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index 69a5b5a8b..64ca1719f 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -87,9 +87,10 @@ class PostgresClient:
     long_query = False
     unlimited_query = False
 
-    def __init__(self, long_query=False, unlimited_query=False):
+    def __init__(self, long_query=False, unlimited_query=False, use_pool=True):
         self.long_query = long_query
         self.unlimited_query = unlimited_query
+        self.use_pool = use_pool
         if unlimited_query:
             long_config = dict(_PG_CONFIG)
             long_config["application_name"] += "-UNLIMITED"
@@ -100,7 +101,7 @@ class PostgresClient:
             long_config["options"] = f"-c statement_timeout=" \
                                      f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}"
             self.connection = psycopg2.connect(**long_config)
-        elif not config('PG_POOL', cast=bool, default=True):
+        elif not use_pool or not config('PG_POOL', cast=bool, default=True):
             single_config = dict(_PG_CONFIG)
             single_config["application_name"] += "-NOPOOL"
             single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}"
@@ -111,6 +112,8 @@ def __enter__(self):
         if self.cursor is None:
             self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
+            self.cursor.cursor_execute = self.cursor.execute
+            self.cursor.execute = self.__execute
             self.cursor.recreate = self.recreate_cursor
         return self.cursor
 
@@ -118,11 +121,12 @@ class PostgresClient:
         try:
             self.connection.commit()
             self.cursor.close()
-            if self.long_query or self.unlimited_query:
+            if not self.use_pool or self.long_query or self.unlimited_query:
                 self.connection.close()
         except Exception as error:
             logging.error("Error while committing/closing PG-connection", error)
             if str(error) == "connection already closed" \
+                    and self.use_pool \
                     and not self.long_query \
                     and not self.unlimited_query \
                     and config('PG_POOL', cast=bool, default=True):
@@ -132,10 +136,22 @@ class PostgresClient:
             raise error
         finally:
             if config('PG_POOL', cast=bool, default=True) \
+                    and self.use_pool \
                     and not self.long_query \
                     and not self.unlimited_query:
                 postgreSQL_pool.putconn(self.connection)
 
+    def __execute(self, query, vars=None):
+        try:
+            result = self.cursor.cursor_execute(query=query, vars=vars)
+        except psycopg2.Error as error:
+            logging.error(f"!!! Error of type:{type(error)} while executing query:")
+            logging.error(query)
+            logging.info("starting rollback to allow future execution")
+            self.connection.rollback()
+            raise error
+        return result
+
     def recreate_cursor(self, rollback=False):
         if rollback:
             try:
diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py
index 366a5d181..655628602 100644
--- a/api/chalicelib/utils/s3.py
+++ b/api/chalicelib/utils/s3.py
@@ -16,7 +16,8 @@ else:
                           aws_access_key_id=config("S3_KEY"),
                           aws_secret_access_key=config("S3_SECRET"),
                           config=Config(signature_version='s3v4'),
-                          region_name=config("sessions_region"))
+                          region_name=config("sessions_region"),
+                          verify=not config("S3_DISABLE_SSL_VERIFY", default=False, cast=bool))
 
 
 def __get_s3_resource():
@@ -26,7 +27,8 @@ def __get_s3_resource():
                           aws_access_key_id=config("S3_KEY"),
                           aws_secret_access_key=config("S3_SECRET"),
                           config=Config(signature_version='s3v4'),
-                          region_name=config("sessions_region"))
+                          region_name=config("sessions_region"),
+                          verify=not config("S3_DISABLE_SSL_VERIFY", default=False, cast=bool))
 
 
 def exists(bucket, key):
@@ -81,7 +83,8 @@ def get_presigned_url_for_upload_secure(bucket, expires_in, key, conditions=None
         Conditions=conditions,
     )
     req = PreparedRequest()
-    req.prepare_url(f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields'])
+    req.prepare_url(
+        f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields'])
     return req.url
 
 
@@ -101,7 +104,8 @@ def get_file(source_bucket, source_key):
 
 def rename(source_bucket, source_key, target_bucket, target_key):
     s3 = __get_s3_resource()
-    s3.Object(target_bucket, target_key).copy_from(CopySource=f'{source_bucket}/{source_key}')
+    s3.Object(target_bucket, target_key).copy_from(
+        CopySource=f'{source_bucket}/{source_key}')
     s3.Object(source_bucket, source_key).delete()
diff --git a/api/entrypoint.sh b/api/entrypoint.sh
index e140268ef..401046526 100755
--- a/api/entrypoint.sh
+++ b/api/entrypoint.sh
@@ -1,3 +1,3 @@
 #!/bin/sh
 
-uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
+uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers
diff --git a/api/entrypoint_alerts.sh b/api/entrypoint_alerts.sh
index dedfa102b..9ac93dd60 100755
--- a/api/entrypoint_alerts.sh
+++ b/api/entrypoint_alerts.sh
@@ -1,3 +1,3 @@
 #!/bin/sh
 export ASSIST_KEY=ignore
-uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload
+uvicorn app:app --host 0.0.0.0 --port 8888
diff --git a/api/env.default b/api/env.default
index 78acd001c..074d9b643 100644
--- a/api/env.default
+++ b/api/env.default
@@ -10,6 +10,7 @@ EMAIL_USE_TLS=true
 S3_HOST=
 S3_KEY=
 S3_SECRET=
+S3_DISABLE_SSL_VERIFY=
 SITE_URL=
 announcement_url=
 captcha_key=
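The new S3_DISABLE_SSL_VERIFY toggle feeds the verify argument added to both boto3 clients in chalicelib/utils/s3.py above; python-decouple's boolean cast maps the empty string to False, so leaving the variable blank (as in this default file) keeps certificate verification on. It presumably exists for self-hosted S3-compatible stores with self-signed certificates. The use_pool knob introduced in pg_client.py serves a different startup need: tenants_exists(use_pool=False) lets routers/core_dynamic.py and routers/subs/health.py (both further down) probe the tenants table on a short-lived dedicated connection at import time, before the shared pool is necessarily initialized.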
@@ -39,7 +40,7 @@ PG_POOL=true
 sessions_bucket=mobs
 sessions_region=us-east-1
 sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
+sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
 STAGE=default-foss
 version_number=1.4.0
 FS_DIR=/mnt/efs
@@ -52,4 +53,4 @@ PRESIGNED_URL_EXPIRATION=3600
 ASSIST_JWT_EXPIRATION=144000
 ASSIST_JWT_SECRET=
 PYTHONUNBUFFERED=1
-THUMBNAILS_BUCKET=thumbnails
\ No newline at end of file
+REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379
\ No newline at end of file
diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt
index b208d28c2..804f59cb4 100644
--- a/api/requirements-alerts.txt
+++ b/api/requirements-alerts.txt
@@ -1,15 +1,15 @@
 requests==2.28.2
-urllib3==1.26.14
-boto3==1.26.70
+urllib3==1.26.15
+boto3==1.26.100
 pyjwt==2.6.0
 psycopg2-binary==2.9.5
-elasticsearch==8.6.1
-jira==3.4.1
+elasticsearch==8.6.2
+jira==3.5.0
 
-fastapi==0.92.0
-uvicorn[standard]==0.20.0
-python-decouple==3.7
-pydantic[email]==1.10.4
-apscheduler==3.10.0
\ No newline at end of file
+fastapi==0.95.0
+uvicorn[standard]==0.21.1
+python-decouple==3.8
+pydantic[email]==1.10.7
+apscheduler==3.10.1
\ No newline at end of file
diff --git a/api/requirements.txt b/api/requirements.txt
index 0a058a94f..2fb86ae9f 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -1,15 +1,17 @@
 requests==2.28.2
-urllib3==1.26.14
-boto3==1.26.70
+urllib3==1.26.15
+boto3==1.26.100
 pyjwt==2.6.0
 psycopg2-binary==2.9.5
-elasticsearch==8.6.1
-jira==3.4.1
+elasticsearch==8.6.2
+jira==3.5.0
 
-fastapi==0.92.0
-uvicorn[standard]==0.20.0
-python-decouple==3.7
-pydantic[email]==1.10.4
-apscheduler==3.10.0
+fastapi==0.95.0
+uvicorn[standard]==0.21.1
+python-decouple==3.8
+pydantic[email]==1.10.7
+apscheduler==3.10.1
+
+redis==4.5.3
\ No newline at end of file
diff --git a/api/routers/core.py b/api/routers/core.py
index a23e437ea..f9481f528 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -1,9 +1,8 @@
 from typing import Union
 
 from decouple import config
-from fastapi import Depends, Body, HTTPException, Response
+from fastapi import Depends, Body, HTTPException, Response, status
 from fastapi.responses import JSONResponse
-from starlette import status
 
 import schemas
 from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
@@ -11,7 +10,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
     log_tool_elasticsearch, log_tool_datadog, \
     log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \
     log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
-    assist, mobile, signup, tenants, boarding, notifications, webhook, users, \
+    assist, mobile, tenants, boarding, notifications, webhook, users, \
     custom_metrics, saved_search, integrations_global
 from chalicelib.core.collaboration_msteams import MSTeams
 from chalicelib.core.collaboration_slack import Slack
@@ -663,12 +662,6 @@ async def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignP
     return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)}
 
 
-@public_app.post('/signup', tags=['signup'])
-@public_app.put('/signup', tags=['signup'])
-async def signup_handler(data: schemas.UserSignupSchema = Body(...)):
-    return signup.create_step1(data)
-
-
 @app.post('/projects', tags=['projects'])
 async def create_project(data: schemas.CreateProjectSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py
index 3389074bf..d13fc8970 100644
--- a/api/routers/core_dynamic.py
+++ b/api/routers/core_dynamic.py
@@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse
 
 import schemas
 from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
-    sessions_favorite, assist, sessions_notes, click_maps
+    sessions_favorite, assist, sessions_notes, click_maps, sessions_replay, signup
 from chalicelib.core import sessions_viewed
 from chalicelib.core import tenants, users, projects, license
 from chalicelib.core import webhook
@@ -27,6 +27,13 @@ async def get_all_signup():
                      "edition": license.EDITION}}
 
 
+if not tenants.tenants_exists(use_pool=False):
+    @public_app.post('/signup', tags=['signup'])
+    @public_app.put('/signup', tags=['signup'])
+    async def signup_handler(data: schemas.UserSignupSchema = Body(...)):
+        return signup.create_tenant(data)
+
+
 @app.get('/account', tags=['accounts'])
 async def get_account(context: schemas.CurrentContext = Depends(OR_context)):
     r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
@@ -145,13 +152,14 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
                                        stack_integrations=True)}
 
 
-@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
+# for backward compatibility
+@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"])
 async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
                       context: schemas.CurrentContext = Depends(OR_context)):
     if isinstance(sessionId, str):
         return {"errors": ["session not found"]}
-    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
-                                  include_fav_viewed=True, group_metadata=True, context=context)
+    data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
+                                         include_fav_viewed=True, group_metadata=True, context=context)
     if data is None:
         return {"errors": ["session not found"]}
     if data.get("inDB"):
@@ -162,6 +170,37 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas
     }
 
 
+@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"])
+async def get_session_replay(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
+                             context: schemas.CurrentContext = Depends(OR_context)):
+    if isinstance(sessionId, str):
+        return {"errors": ["session not found"]}
+    data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True,
+                                      include_fav_viewed=True, group_metadata=True, context=context)
+    if data is None:
+        return {"errors": ["session not found"]}
+    if data.get("inDB"):
+        background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id,
+                                  session_id=sessionId)
+    return {
+        'data': data
+    }
+
+
+@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"])
+async def get_session_events(projectId: int, sessionId: Union[int, str],
+                             context: schemas.CurrentContext = Depends(OR_context)):
+    if isinstance(sessionId, str):
+        return {"errors": ["session not found"]}
+    data = sessions_replay.get_events(project_id=projectId, session_id=sessionId)
+    if data is None:
+        return {"errors": ["session not found"]}
+
+    return {
+        'data': data
+    }
+
+
 @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
 async def get_error_trace(projectId: int, sessionId: int, errorId: str,
                           context: schemas.CurrentContext = Depends(OR_context)):
@@ -239,8 +278,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac
                            context: schemas.CurrentContext = Depends(OR_context)):
     data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
     if data is None:
-        data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId,
-                                      full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
+        data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId,
+                                          full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
     if data is None:
         return {"errors": ["session not found"]}
     if data.get("inDB"):
diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py
new file mode 100644
index 000000000..fdef52509
--- /dev/null
+++ b/api/routers/subs/health.py
@@ -0,0 +1,20 @@
+from fastapi import HTTPException, status
+
+from chalicelib.core import health, tenants
+from routers.base import get_routers
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.get('/health', tags=["health-check"])
+def get_global_health_status():
+    return {"data": health.get_health()}
+
+
+if not tenants.tenants_exists(use_pool=False):
+    @public_app.get('/health', tags=["health-check"])
+    def get_public_health_status():
+        if tenants.tenants_exists():
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not Found")
+
+        return get_global_health_status()
diff --git a/api/run-alerts-dev.sh b/api/run-alerts-dev.sh
index 54db30171..309356133 100755
--- a/api/run-alerts-dev.sh
+++ b/api/run-alerts-dev.sh
@@ -1,3 +1,3 @@
 #!/bin/zsh
 
-uvicorn app_alerts:app --reload
\ No newline at end of file
+uvicorn app_alerts:app --reload --port 8888
\ No newline at end of file
diff --git a/api/schemas.py b/api/schemas.py
index 5cae3a31a..992729870 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -500,6 +500,7 @@ class IssueType(str, Enum):
     crash = 'crash'
     custom = 'custom'
     js_exception = 'js_exception'
+    mouse_thrashing = 'mouse_thrashing'
 
 
 class MetricFormatType(str, Enum):
diff --git a/utilities/.dockerignore b/assist/.dockerignore
similarity index 100%
rename from utilities/.dockerignore
rename to assist/.dockerignore
diff --git a/utilities/.gitignore b/assist/.gitignore
similarity index 100%
rename from utilities/.gitignore
rename to assist/.gitignore
diff --git a/utilities/Dockerfile b/assist/Dockerfile
similarity index 97%
rename from utilities/Dockerfile
rename to assist/Dockerfile
index edbaae03c..84b54c906 100644
--- a/utilities/Dockerfile
+++ b/assist/Dockerfile
@@ -18,4 +18,4 @@ USER 1001
 ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE
 
 ENTRYPOINT ["/sbin/tini", "--"]
-CMD npm start
+CMD npm start
\ No newline at end of file
diff --git a/utilities/build.sh b/assist/build.sh
similarity index 88%
rename from utilities/build.sh
rename to assist/build.sh
index 861d37596..3f98bcf36 100644
--- a/utilities/build.sh
+++ b/assist/build.sh
@@ -18,7 +18,7 @@ check_prereq() {
 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
     image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-    [[ $ee == "true" ]] && {
+    [[ $ee == "true" ]] && {
         image_tag="${image_tag}-ee"
     }
 }
@@ -35,20 +35,20 @@ update_helm_release() {
 }
 
 function build_api(){
-    destination="_utilities"
+    destination="_assist"
     [[ $1 == "ee" ]] && {
-        destination="_utilities_ee"
+        destination="_assist_ee"
     }
-    cp -R ../utilities ../${destination}
+    cp -R ../assist ../${destination}
     cd ../${destination}
     # Copy enterprise code
     [[ $1 == "ee" ]] && {
-        cp -rf ../ee/utilities/* ./
+        cp -rf ../ee/assist/* ./
     }
     docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist:${image_tag} .
-    cd ../utilities
+    cd ../assist
     rm -rf ../${destination}
     [[ $PUSH_IMAGE -eq 1 ]] && {
         docker push ${DOCKER_REPO:-'local'}/assist:${image_tag}
@@ -63,4 +63,6 @@ function build_api(){
 check_prereq
 build_api $1
-[[ $PATCH -eq 1 ]] && update_helm_release assist
+if [[ $PATCH -eq 1 ]]; then
+    update_helm_release assist
+fi
\ No newline at end of file
diff --git a/utilities/package-lock.json b/assist/package-lock.json
similarity index 99%
rename from utilities/package-lock.json
rename to assist/package-lock.json
index aba9e43fe..683472320 100644
--- a/utilities/package-lock.json
+++ b/assist/package-lock.json
@@ -45,9 +45,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "18.14.1",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz",
-      "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ=="
+      "version": "18.14.6",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz",
+      "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA=="
     },
     "node_modules/accepts": {
       "version": "1.3.8",
@@ -987,9 +987,9 @@
       }
    },
    "node_modules/ua-parser-js": {
-      "version": "1.0.33",
-      "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz",
-      "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==",
+      "version": "1.0.34",
+      "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz",
+      "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==",
       "funding": [
         {
           "type": "opencollective",
diff --git a/utilities/package.json b/assist/package.json
similarity index 97%
rename from utilities/package.json
rename to assist/package.json
index b06c8cae5..ad9794fea 100644
--- a/utilities/package.json
+++ b/assist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "assist-server",
-  "version": "1.0.0",
+  "version": "v1.11.0",
   "description": "assist server to get live sessions & sourcemaps reader to get stack trace",
   "main": "peerjs-server.js",
   "scripts": {
diff --git a/ee/utilities/run-dev.sh b/assist/run-dev.sh
similarity index 100%
rename from ee/utilities/run-dev.sh
rename to assist/run-dev.sh
diff --git a/utilities/server.js b/assist/server.js
similarity index 81%
rename from utilities/server.js
rename to assist/server.js
index d71aca65d..5eb6c2e16 100644
--- a/utilities/server.js
+++ b/assist/server.js
@@ -2,6 +2,7 @@ const dumps = require('./utils/HeapSnapshot');
 const express = require('express');
 const socket = require("./servers/websocket");
 const {request_logger} = require("./utils/helper");
+const health = require("./utils/health");
 const assert = require('assert').strict;
 
 const debug = process.env.debug === "1";
@@ -10,7 +11,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0';
 const PORT = process.env.LISTEN_PORT || 9001;
 assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
 const P_KEY = process.env.ASSIST_KEY;
-const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`
+const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`;
 
 const wsapp = express();
 wsapp.use(express.json());
@@ -27,16 +28,9 @@ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
 
 const wsserver = wsapp.listen(PORT, HOST, () => {
     console.log(`WS App listening on http://${HOST}:${PORT}`);
-    console.log('Press Ctrl+C to quit.');
+    health.healthApp.listen(health.PORT, HOST, health.listen_cb);
 });
+
 wsapp.enable('trust proxy');
 socket.start(wsserver);
-module.exports = {wsserver};
-
-wsapp.get('/private/shutdown', (req, res) => {
-        console.log("Requested shutdown");
-        res.statusCode = 200;
-        res.end("ok!");
-        process.kill(1, "SIGTERM");
-    }
-);
\ No newline at end of file
+module.exports = {wsserver};
\ No newline at end of file
diff --git a/utilities/servers/websocket.js b/assist/servers/websocket.js
similarity index 99%
rename from utilities/servers/websocket.js
rename to assist/servers/websocket.js
index f5d029bc2..4c4a657bb 100644
--- a/utilities/servers/websocket.js
+++ b/assist/servers/websocket.js
@@ -26,7 +26,7 @@ const debug = process.env.debug === "1";
 
 const createSocketIOServer = function (server, prefix) {
     io = _io(server, {
-        maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+        maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
         cors: {
             origin: "*",
             methods: ["GET", "POST", "PUT"]
diff --git a/utilities/utils/HeapSnapshot.js b/assist/utils/HeapSnapshot.js
similarity index 100%
rename from utilities/utils/HeapSnapshot.js
rename to assist/utils/HeapSnapshot.js
diff --git a/utilities/utils/assistHelper.js b/assist/utils/assistHelper.js
similarity index 100%
rename from utilities/utils/assistHelper.js
rename to assist/utils/assistHelper.js
diff --git a/utilities/utils/geoIP.js b/assist/utils/geoIP.js
similarity index 100%
rename from utilities/utils/geoIP.js
rename to assist/utils/geoIP.js
diff --git a/assist/utils/health.js b/assist/utils/health.js
new file mode 100644
index 000000000..0b89dd1d8
--- /dev/null
+++ b/assist/utils/health.js
@@ -0,0 +1,52 @@
+const express = require('express');
+const HOST = process.env.LISTEN_HOST || '0.0.0.0';
+const PORT = process.env.HEALTH_PORT || 8888;
+
+
+const {request_logger} = require("./helper");
+const debug = process.env.debug === "1";
+const respond = function (res, data) {
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": data}));
+}
+
+const check_health = async function (req, res) {
+    debug && console.log("[health] reporting service health");
+    respond(res, {
+        "health": true,
+        "details": {
+            "version": process.env.npm_package_version
+        }
+    });
+}
+
+
+const healthApp = express();
+healthApp.use(express.json());
+healthApp.use(express.urlencoded({extended: true}));
+healthApp.use(request_logger("[healthApp]"));
+healthApp.get(['/'], (req, res) => {
+        res.statusCode = 200;
+        res.end("healthApp ok!");
+    }
+);
+healthApp.get('/health', check_health);
+healthApp.get('/shutdown', (req, res) => {
+        console.log("Requested shutdown");
+        res.statusCode = 200;
+        res.end("ok!");
+        process.kill(1, "SIGTERM");
+    }
+);
+
+const listen_cb = async function () {
+    console.log(`Health App listening on http://${HOST}:${PORT}`);
+    console.log('Press Ctrl+C to quit.');
+}
+
+module.exports = {
+    healthApp,
+    PORT,
+    listen_cb
+};
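The health app reports its version from npm_package_version, which npm fills in from the package.json version bumped above (v1.11.0). Its /shutdown handler still signals PID 1; under the image's tini entrypoint that should arrive as a clean SIGTERM, the same mechanism the removed /private/shutdown route relied on, now just unreachable from the public port.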
diff --git a/utilities/utils/helper.js b/assist/utils/helper.js
similarity index 100%
rename from utilities/utils/helper.js
rename to assist/utils/helper.js
diff --git a/backend/Dockerfile b/backend/Dockerfile
index c7606559e..749900ba5 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -82,7 +82,9 @@ ENV TZ=UTC \
     COMPRESSION_TYPE=zstd \
     CH_USERNAME="default" \
     CH_PASSWORD="" \
-    CH_DATABASE="default"
+    CH_DATABASE="default" \
+    # Max file size to process, default to 100MB
+    MAX_FILE_SIZE=100000000
 
 
 RUN if [ "$SERVICE_NAME" = "http" ]; then \
diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go
index b05ecbe52..16eac7cb5 100644
--- a/backend/cmd/assets/main.go
+++ b/backend/cmd/assets/main.go
@@ -13,7 +13,6 @@ import (
     "openreplay/backend/pkg/messages"
     "openreplay/backend/pkg/metrics"
     assetsMetrics "openreplay/backend/pkg/metrics/assets"
-    "openreplay/backend/pkg/pprof"
    "openreplay/backend/pkg/queue"
 )
 
@@ -24,9 +23,6 @@ func main() {
     log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
 
     cfg := config.New()
-    if cfg.UseProfiler {
-        pprof.StartProfilingServer()
-    }
 
     cacher := cacher.NewCacher(cfg)
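The two rewritten main() functions below (cmd/db, cmd/heuristics) drop their hand-rolled signal loops in favor of a constructor plus terminator.Wait. The terminator package itself is not part of this patch, so the following is only a minimal sketch of what Wait plausibly does, inferred from its call sites: block until SIGINT or SIGTERM, then hand cleanup to anything exposing Stop().

package terminator

import (
    "log"
    "os"
    "os/signal"
    "syscall"
)

// Service mirrors internal/service.Interface introduced by this patch.
type Service interface {
    Stop()
}

// Wait blocks the caller until an interrupt or termination signal arrives,
// then asks the service to flush and shut down before exiting.
func Wait(s Service) {
    sigchan := make(chan os.Signal, 1)
    signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
    sig := <-sigchan
    log.Printf("Caught signal %s: terminating\n", sig.String())
    s.Stop()
    os.Exit(0)
}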
diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go
index 84b0d81ed..82ece145f 100644
--- a/backend/cmd/db/main.go
+++ b/backend/cmd/db/main.go
@@ -1,174 +1,60 @@
 package main
 
 import (
-    "errors"
     "log"
-    "os"
-    "os/signal"
-    "syscall"
-    "time"
 
-    "openreplay/backend/internal/config/db"
+    config "openreplay/backend/internal/config/db"
+    "openreplay/backend/internal/db"
     "openreplay/backend/internal/db/datasaver"
     "openreplay/backend/pkg/db/cache"
     "openreplay/backend/pkg/db/postgres"
-    types2 "openreplay/backend/pkg/db/types"
-    "openreplay/backend/pkg/handlers"
-    custom2 "openreplay/backend/pkg/handlers/custom"
     "openreplay/backend/pkg/messages"
     "openreplay/backend/pkg/metrics"
     databaseMetrics "openreplay/backend/pkg/metrics/database"
-    "openreplay/backend/pkg/pprof"
     "openreplay/backend/pkg/queue"
-    "openreplay/backend/pkg/sessions"
+    "openreplay/backend/pkg/terminator"
 )
 
 func main() {
+    log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
+
     m := metrics.New()
     m.Register(databaseMetrics.List())
 
-    log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
-
-    cfg := db.New()
-    if cfg.UseProfiler {
-        pprof.StartProfilingServer()
-    }
+    cfg := config.New()
 
     // Init database
     pg := cache.NewPGCache(
         postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit), cfg.ProjectExpirationTimeoutMs)
     defer pg.Close()
 
-    // HandlersFabric returns the list of message handlers we want to be applied to each incoming message.
-    handlersFabric := func() []handlers.MessageProcessor {
-        return []handlers.MessageProcessor{
-            &custom2.EventMapper{},
-            custom2.NewInputEventBuilder(),
-            custom2.NewPageEventBuilder(),
-        }
-    }
-
-    // Create handler's aggregator
-    builderMap := sessions.NewBuilderMap(handlersFabric)
-
-    // Init modules
-    saver := datasaver.New(pg, cfg)
-    saver.InitStats()
+    // Init data saver
+    saver := datasaver.New(cfg, pg)
 
+    // Message filter
     msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
-        messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgClickEvent,
-        messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
-        messages.MsgJSException, messages.MsgResourceTiming,
-        messages.MsgCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL,
-        messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument,
-        messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming}
-
-    // Handler logic
-    msgHandler := func(msg messages.Message) {
-        // Just save session data into db without additional checks
-        if err := saver.InsertMessage(msg); err != nil {
-            if !postgres.IsPkeyViolation(err) {
-                log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
-            }
-            return
-        }
-
-        var (
-            session *types2.Session
-            err     error
-        )
-        if msg.TypeID() == messages.MsgSessionEnd {
-            session, err = pg.GetSession(msg.SessionID())
-        } else {
-            session, err = pg.Cache.GetSession(msg.SessionID())
-        }
-        if session == nil {
-            if err != nil && !errors.Is(err, cache.NilSessionInCacheError) {
-                log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
-            }
-            return
-        }
-
-        // Save statistics to db
-        err = saver.InsertStats(session, msg)
-        if err != nil {
-            log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
-        }
-
-        // Handle heuristics and save to temporary queue in memory
-        builderMap.HandleMessage(msg)
-
-        // Process saved heuristics messages as usual messages above in the code
-        builderMap.IterateSessionReadyMessages(msg.SessionID(), func(msg messages.Message) {
-            if err := saver.InsertMessage(msg); err != nil {
-                if !postgres.IsPkeyViolation(err) {
-                    log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
-                }
-                return
-            }
-
-            if err := saver.InsertStats(session, msg); err != nil {
-                log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
-            }
-        })
-    }
+        messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
+        messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue,
+        messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction,
+        messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, messages.MsgMouseClick,
+        messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
+        messages.MsgInputEvent, messages.MsgPageEvent, messages.MsgMouseThrashing, messages.MsgInputChange,
+        messages.MsgUnbindNodes}
 
     // Init consumer
     consumer := queue.NewConsumer(
         cfg.GroupDB,
         []string{
-            cfg.TopicRawWeb,    // from tracker
-            cfg.TopicAnalytics, // from heuristics
+            cfg.TopicRawWeb,
+            cfg.TopicAnalytics,
         },
-        messages.NewMessageIterator(msgHandler, msgFilter, true),
+        messages.NewMessageIterator(saver.Handle, msgFilter, true),
         false,
         cfg.MessageSizeLimit,
     )
 
+    // Run service and wait for TERM signal
+    service := db.New(cfg, consumer, saver)
     log.Printf("Db service started\n")
-
-    sigchan := make(chan os.Signal, 1)
-    signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
-
-    commitTick := time.Tick(cfg.CommitBatchTimeout)
-
-    // Send collected batches to db
-    commitDBUpdates := func() {
-        // Commit collected batches and bulks of information to PG
-        pg.Commit()
-        // Commit collected batches of information to CH
-        if err := saver.CommitStats(); err != nil {
-            log.Printf("Error on stats commit: %v", err)
-        }
-        // Commit current position in queue
-        if err := consumer.Commit(); err != nil {
-            log.Printf("Error on consumer commit: %v", err)
-        }
-    }
-
-    for {
-        select {
-        case sig := <-sigchan:
-            log.Printf("Caught signal %s: terminating\n", sig.String())
-            commitDBUpdates()
-            if err := pg.Close(); err != nil {
-                log.Printf("db.Close error: %s", err)
-            }
-            if err := saver.Close(); err != nil {
-                log.Printf("saver.Close error: %s", err)
-            }
-            consumer.Close()
-            os.Exit(0)
-        case <-commitTick:
-            commitDBUpdates()
-            builderMap.ClearOldSessions()
-        case msg := <-consumer.Rebalanced():
-            log.Println(msg)
-        default:
-            // Handle new message from queue
-            if err := consumer.ConsumeNext(); err != nil {
-                log.Fatalf("Error on consumption: %v", err)
-            }
-        }
-    }
+    terminator.Wait(service)
 }
diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go
index da7ca9b89..84d816a33 100644
--- a/backend/cmd/ender/main.go
+++ b/backend/cmd/ender/main.go
@@ -18,7 +18,6 @@ import (
     "openreplay/backend/pkg/metrics"
     databaseMetrics "openreplay/backend/pkg/metrics/database"
     enderMetrics "openreplay/backend/pkg/metrics/ender"
-    "openreplay/backend/pkg/pprof"
     "openreplay/backend/pkg/queue"
 )
 
@@ -30,9 +29,6 @@ func main() {
     log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
 
     cfg := ender.New()
-    if cfg.UseProfiler {
-        pprof.StartProfilingServer()
-    }
 
     pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), cfg.ProjectExpirationTimeoutMs)
     defer pg.Close()
@@ -72,12 +68,12 @@ func main() {
             consumer.Close()
             os.Exit(0)
         case <-tick:
-            failedSessionEnds := make(map[uint64]int64)
+            failedSessionEnds := make(map[uint64]uint64)
             duplicatedSessionEnds := make(map[uint64]uint64)
 
             // Find ended sessions and send notification to other services
-            sessions.HandleEndedSessions(func(sessionID uint64, timestamp int64) bool {
-                msg := &messages.SessionEnd{Timestamp: uint64(timestamp)}
+            sessions.HandleEndedSessions(func(sessionID uint64, timestamp uint64) bool {
+                msg := &messages.SessionEnd{Timestamp: timestamp}
                 currDuration, err := pg.GetSessionDuration(sessionID)
                 if err != nil {
                     log.Printf("getSessionDuration failed, sessID: %d, err: %s", sessionID, err)
diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go
index ac55b83bc..3a7abb7a0 100644
--- a/backend/cmd/heuristics/main.go
+++ b/backend/cmd/heuristics/main.go
@@ -2,90 +2,54 @@ package main
 
 import (
     "log"
-    "openreplay/backend/pkg/pprof"
-    "os"
-    "os/signal"
-    "syscall"
-    "time"
-
-    "openreplay/backend/internal/config/heuristics"
+
+    config "openreplay/backend/internal/config/heuristics"
+    "openreplay/backend/internal/heuristics"
     "openreplay/backend/pkg/handlers"
-    web2 "openreplay/backend/pkg/handlers/web"
-    "openreplay/backend/pkg/intervals"
+    "openreplay/backend/pkg/handlers/custom"
+    "openreplay/backend/pkg/handlers/web"
     "openreplay/backend/pkg/messages"
+    "openreplay/backend/pkg/metrics"
+    heuristicsMetrics "openreplay/backend/pkg/metrics/heuristics"
     "openreplay/backend/pkg/queue"
     "openreplay/backend/pkg/sessions"
+    "openreplay/backend/pkg/terminator"
 )
 
 func main() {
-    log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
+    m := metrics.New()
+    m.Register(heuristicsMetrics.List())
 
-    cfg := heuristics.New()
-    if cfg.UseProfiler {
-        pprof.StartProfilingServer()
-    }
+    log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
+    cfg := config.New()
 
     // HandlersFabric returns the list of message handlers we want to be applied to each incoming message.
     handlersFabric := func() []handlers.MessageProcessor {
         return []handlers.MessageProcessor{
-            // web handlers
-            &web2.ClickRageDetector{},
-            &web2.CpuIssueDetector{},
-            &web2.DeadClickDetector{},
-            &web2.MemoryIssueDetector{},
-            &web2.NetworkIssueDetector{},
-            &web2.PerformanceAggregator{},
-            // Other handlers (you can add your custom handlers here)
-            //&custom.CustomHandler{},
+            custom.NewInputEventBuilder(),
+            custom.NewPageEventBuilder(),
+            web.NewDeadClickDetector(),
+            &web.ClickRageDetector{},
+            &web.CpuIssueDetector{},
+            &web.MemoryIssueDetector{},
+            &web.NetworkIssueDetector{},
+            &web.PerformanceAggregator{},
         }
     }
 
-    // Create handler's aggregator
-    builderMap := sessions.NewBuilderMap(handlersFabric)
-
-    // Init producer and consumer for data bus
+    eventBuilder := sessions.NewBuilderMap(handlersFabric)
     producer := queue.NewProducer(cfg.MessageSizeLimit, true)
-
-    msgHandler := func(msg messages.Message) {
-        builderMap.HandleMessage(msg)
-    }
-
     consumer := queue.NewConsumer(
         cfg.GroupHeuristics,
         []string{
             cfg.TopicRawWeb,
         },
-        messages.NewMessageIterator(msgHandler, nil, true),
+        messages.NewMessageIterator(eventBuilder.HandleMessage, nil, true),
         false,
         cfg.MessageSizeLimit,
     )
 
+    // Run service and wait for TERM signal
+    service := heuristics.New(cfg, producer, consumer, eventBuilder)
     log.Printf("Heuristics service started\n")
-
-    sigchan := make(chan os.Signal, 1)
-    signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
-
-    tick := time.Tick(intervals.EVENTS_COMMIT_INTERVAL * time.Millisecond)
-    for {
-        select {
-        case sig := <-sigchan:
-            log.Printf("Caught signal %v: terminating\n", sig)
-            producer.Close(cfg.ProducerTimeout)
-            consumer.Commit()
-            consumer.Close()
-            os.Exit(0)
-        case <-tick:
-            builderMap.IterateReadyMessages(func(sessionID uint64, readyMsg messages.Message) {
-                producer.Produce(cfg.TopicAnalytics, sessionID, readyMsg.Encode())
-            })
-            producer.Flush(cfg.ProducerTimeout)
-            consumer.Commit()
-        case msg := <-consumer.Rebalanced():
-            log.Println(msg)
-        default:
-            if err := consumer.ConsumeNext(); err != nil {
-                log.Fatalf("Error on consuming: %v", err)
-            }
-        }
-    }
+    terminator.Wait(service)
 }
diff --git a/backend/cmd/http/main.go b/backend/cmd/http/main.go
index 83eedaf29..74c58f92b 100644
--- a/backend/cmd/http/main.go
+++ b/backend/cmd/http/main.go
@@ -15,7 +15,6 @@ import (
     "openreplay/backend/pkg/metrics"
     databaseMetrics "openreplay/backend/pkg/metrics/database"
     httpMetrics "openreplay/backend/pkg/metrics/http"
-    "openreplay/backend/pkg/pprof"
     "openreplay/backend/pkg/queue"
 )
 
@@ -27,9 +26,6 @@ func main() {
     log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
 
     cfg := http.New()
-    if cfg.UseProfiler {
-        pprof.StartProfilingServer()
-    }
 
     // Connect to queue
     producer := queue.NewProducer(cfg.MessageSizeLimit, true)
"openreplay/backend/pkg/intervals" "openreplay/backend/pkg/metrics" databaseMetrics "openreplay/backend/pkg/metrics/database" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/token" ) @@ -25,9 +24,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } pg := postgres.NewConn(cfg.Postgres.String(), 0, 0) defer pg.Close() diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 4bbaeeee4..e9cf1367a 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -16,7 +16,6 @@ import ( "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" sinkMetrics "openreplay/backend/pkg/metrics/sink" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" ) @@ -27,9 +26,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := sink.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } if _, err := os.Stat(cfg.FsDir); os.IsNotExist(err) { log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err) @@ -112,7 +108,7 @@ func main() { log.Printf("zero ts; sessID: %d, msgType: %d", msg.SessionID(), msg.TypeID()) } else { // Log ts of last processed message - counter.Update(msg.SessionID(), time.UnixMilli(ts)) + counter.Update(msg.SessionID(), time.UnixMilli(int64(ts))) } // Try to encode message to avoid null data inserts diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index 472324b95..2a1f6a402 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/messages" "openreplay/backend/pkg/metrics" storageMetrics "openreplay/backend/pkg/metrics/storage" - "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" cloud "openreplay/backend/pkg/storage" ) @@ -25,9 +24,6 @@ func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() - if cfg.UseProfiler { - pprof.StartProfilingServer() - } s3 := cloud.NewS3(cfg.S3Region, cfg.S3Bucket) srv, err := storage.New(cfg, s3) diff --git a/backend/go.mod b/backend/go.mod index 9633f2b18..4e6647a02 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -24,7 +24,7 @@ require ( github.com/sethvargo/go-envconfig v0.7.0 github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe - golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 + golang.org/x/net v0.8.0 google.golang.org/api v0.81.0 ) @@ -61,8 +61,8 @@ require ( golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect - golang.org/x/sys v0.1.0 // indirect - golang.org/x/text v0.7.0 // indirect + golang.org/x/sys v0.6.0 // indirect + golang.org/x/text v0.8.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect diff --git a/backend/go.sum b/backend/go.sum index 676cf479b..d9c07010c 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -589,8 +589,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod 
h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 h1:KrLJ+iz8J6j6VVr/OCfULAcK+xozUmWE43fKpMR4MlI= -golang.org/x/net v0.1.1-0.20221104162952-702349b0e862/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -702,8 +702,8 @@ golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -715,8 +715,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/backend/internal/assets/cacher/cacher.go b/backend/internal/assets/cacher/cacher.go index 4b0353a9a..513d40c47 100644 --- a/backend/internal/assets/cacher/cacher.go +++ b/backend/internal/assets/cacher/cacher.go @@ -2,9 +2,11 @@ package cacher import ( "crypto/tls" + "crypto/x509" "fmt" "io" "io/ioutil" + "log" "mime" "net/http" metrics "openreplay/backend/pkg/metrics/assets" @@ -38,6 +40,35 @@ func (c *cacher) CanCache() bool { func NewCacher(cfg *config.Config) *cacher { rewriter := assets.NewRewriter(cfg.AssetsOrigin) + + tlsConfig := &tls.Config{ + InsecureSkipVerify: true, + } + + if cfg.ClientCertFilePath != "" && cfg.ClientKeyFilePath != "" && 
cfg.CaCertFilePath != "" { + + var cert tls.Certificate + var err error + + cert, err = tls.LoadX509KeyPair(cfg.ClientCertFilePath, cfg.ClientKeyFilePath) + if err != nil { + log.Fatalf("Error creating x509 keypair from the client cert file %s and client key file %s , Error: %s", err, cfg.ClientCertFilePath, cfg.ClientKeyFilePath) + } + + caCert, err := ioutil.ReadFile(cfg.CaCertFilePath) + if err != nil { + log.Fatalf("Error opening cert file %s, Error: %s", cfg.CaCertFilePath, err) + } + caCertPool := x509.NewCertPool() + caCertPool.AppendCertsFromPEM(caCert) + tlsConfig = &tls.Config{ + InsecureSkipVerify: true, + Certificates: []tls.Certificate{cert}, + RootCAs: caCertPool, + } + + } + c := &cacher{ timeoutMap: newTimeoutMap(), s3: storage.NewS3(cfg.AWSRegion, cfg.S3BucketAssets), @@ -45,7 +76,7 @@ func NewCacher(cfg *config.Config) *cacher { Timeout: time.Duration(6) * time.Second, Transport: &http.Transport{ Proxy: http.ProxyFromEnvironment, - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + TLSClientConfig: tlsConfig, }, }, rewriter: rewriter, @@ -104,6 +135,13 @@ func (c *cacher) cacheURL(t *Task) { if contentType == "" { contentType = mime.TypeByExtension(filepath.Ext(res.Request.URL.Path)) } + + // Skip html file (usually it's a CDN mock for 404 error) + if strings.HasPrefix(contentType, "text/html") { + c.Errors <- errors.Wrap(fmt.Errorf("context type is text/html, sessID: %d", t.sessionID), t.urlContext) + return + } + isCSS := strings.HasPrefix(contentType, "text/css") strData := string(data) diff --git a/backend/internal/config/assets/config.go b/backend/internal/config/assets/config.go index 399ee84f4..19c747e71 100644 --- a/backend/internal/config/assets/config.go +++ b/backend/internal/config/assets/config.go @@ -15,6 +15,9 @@ type Config struct { AssetsSizeLimit int `env:"ASSETS_SIZE_LIMIT,required"` AssetsRequestHeaders map[string]string `env:"ASSETS_REQUEST_HEADERS"` UseProfiler bool `env:"PROFILER_ENABLED,default=false"` + ClientKeyFilePath string `env:"CLIENT_KEY_FILE_PATH"` + CaCertFilePath string `env:"CA_CERT_FILE_PATH"` + ClientCertFilePath string `env:"CLIENT_CERT_FILE_PATH"` } func New() *Config { diff --git a/backend/internal/config/heuristics/config.go b/backend/internal/config/heuristics/config.go index 6552944a3..d222387c5 100644 --- a/backend/internal/config/heuristics/config.go +++ b/backend/internal/config/heuristics/config.go @@ -3,6 +3,7 @@ package heuristics import ( "openreplay/backend/internal/config/common" "openreplay/backend/internal/config/configurator" + "openreplay/backend/pkg/pprof" ) type Config struct { @@ -19,5 +20,8 @@ type Config struct { func New() *Config { cfg := &Config{} configurator.Process(cfg) + if cfg.UseProfiler { + pprof.StartProfilingServer() + } return cfg } diff --git a/backend/internal/db/datasaver/messages.go b/backend/internal/db/datasaver/messages.go deleted file mode 100644 index 12e7152b4..000000000 --- a/backend/internal/db/datasaver/messages.go +++ /dev/null @@ -1,74 +0,0 @@ -package datasaver - -import ( - "fmt" - . 
"openreplay/backend/pkg/messages" -) - -func (mi *Saver) InsertMessage(msg Message) error { - sessionID := msg.SessionID() - switch m := msg.(type) { - // Common - case *Metadata: - if err := mi.pg.InsertMetadata(sessionID, m); err != nil { - return fmt.Errorf("insert metadata err: %s", err) - } - return nil - case *IssueEvent: - return mi.pg.InsertIssueEvent(sessionID, m) - //TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others - - // Web - case *SessionStart: - return mi.pg.HandleWebSessionStart(sessionID, m) - case *SessionEnd: - return mi.pg.HandleWebSessionEnd(sessionID, m) - case *UserID: - return mi.pg.InsertWebUserID(sessionID, m) - case *UserAnonymousID: - return mi.pg.InsertWebUserAnonymousID(sessionID, m) - case *CustomEvent: - return mi.pg.InsertWebCustomEvent(sessionID, m) - case *ClickEvent: - return mi.pg.InsertWebClickEvent(sessionID, m) - case *InputEvent: - return mi.pg.InsertWebInputEvent(sessionID, m) - - // Unique Web messages - case *PageEvent: - return mi.pg.InsertWebPageEvent(sessionID, m) - case *NetworkRequest: - return mi.pg.InsertWebNetworkRequest(sessionID, m) - case *GraphQL: - return mi.pg.InsertWebGraphQL(sessionID, m) - case *JSException: - return mi.pg.InsertWebJSException(m) - case *IntegrationEvent: - return mi.pg.InsertWebIntegrationEvent(m) - - // IOS - case *IOSSessionStart: - return mi.pg.InsertIOSSessionStart(sessionID, m) - case *IOSSessionEnd: - return mi.pg.InsertIOSSessionEnd(sessionID, m) - case *IOSUserID: - return mi.pg.InsertIOSUserID(sessionID, m) - case *IOSUserAnonymousID: - return mi.pg.InsertIOSUserAnonymousID(sessionID, m) - case *IOSCustomEvent: - return mi.pg.InsertIOSCustomEvent(sessionID, m) - case *IOSClickEvent: - return mi.pg.InsertIOSClickEvent(sessionID, m) - case *IOSInputEvent: - return mi.pg.InsertIOSInputEvent(sessionID, m) - // Unique IOS messages - case *IOSNetworkCall: - return mi.pg.InsertIOSNetworkCall(sessionID, m) - case *IOSScreenEnter: - return mi.pg.InsertIOSScreenEnter(sessionID, m) - case *IOSCrash: - return mi.pg.InsertIOSCrash(sessionID, m) - - } - return nil // "Not implemented" -} diff --git a/backend/internal/db/datasaver/methods.go b/backend/internal/db/datasaver/methods.go new file mode 100644 index 000000000..c4e83cf09 --- /dev/null +++ b/backend/internal/db/datasaver/methods.go @@ -0,0 +1,19 @@ +package datasaver + +import ( + . "openreplay/backend/pkg/messages" +) + +func (s *saverImpl) init() { + // noop +} + +func (s *saverImpl) handleExtraMessage(msg Message) error { + switch m := msg.(type) { + case *PerformanceTrackAggr: + return s.pg.InsertWebStatsPerformance(m) + case *ResourceTiming: + return s.pg.InsertWebStatsResourceEvent(m) + } + return nil +} diff --git a/backend/internal/db/datasaver/saver.go b/backend/internal/db/datasaver/saver.go index 2a356d120..1a017fa6f 100644 --- a/backend/internal/db/datasaver/saver.go +++ b/backend/internal/db/datasaver/saver.go @@ -1,16 +1,130 @@ package datasaver import ( + "log" + "openreplay/backend/internal/config/db" "openreplay/backend/pkg/db/cache" - "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/db/clickhouse" + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/db/types" + . 
"openreplay/backend/pkg/messages" + queue "openreplay/backend/pkg/queue/types" ) -type Saver struct { - pg *cache.PGCache - producer types.Producer +type Saver interface { + Handle(msg Message) + Commit() error + Close() error } -func New(pg *cache.PGCache, _ *db.Config) *Saver { - return &Saver{pg: pg, producer: nil} +type saverImpl struct { + cfg *db.Config + pg *cache.PGCache + ch clickhouse.Connector + producer queue.Producer +} + +func New(cfg *db.Config, pg *cache.PGCache) Saver { + s := &saverImpl{cfg: cfg, pg: pg} + s.init() + return s +} + +func (s *saverImpl) Handle(msg Message) { + if msg.TypeID() == MsgCustomEvent { + defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent))) + } + if err := s.handleMessage(msg); err != nil { + if !postgres.IsPkeyViolation(err) { + log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) + } + return + } + if err := s.handleExtraMessage(msg); err != nil { + log.Printf("Stats Insertion Error %v; Session: %d, Message: %v", err, msg.SessionID(), msg) + } + return +} + +func (s *saverImpl) handleMessage(msg Message) error { + switch m := msg.(type) { + case *Metadata: + return s.pg.InsertMetadata(m) + case *IssueEvent: + return s.pg.InsertIssueEvent(m) + case *SessionStart: + return s.pg.HandleWebSessionStart(m) + case *SessionEnd: + return s.pg.HandleWebSessionEnd(m) + case *UserID: + return s.pg.InsertWebUserID(m) + case *UserAnonymousID: + return s.pg.InsertWebUserAnonymousID(m) + case *CustomEvent: + return s.pg.InsertWebCustomEvent(m) + case *MouseClick: + return s.pg.InsertWebClickEvent(m) + case *InputEvent: + return s.pg.InsertWebInputEvent(m) + case *PageEvent: + return s.pg.InsertWebPageEvent(m) + case *NetworkRequest: + return s.pg.InsertWebNetworkRequest(m) + case *GraphQL: + return s.pg.InsertWebGraphQL(m) + case *JSException: + return s.pg.InsertWebJSException(m) + case *IntegrationEvent: + return s.pg.InsertWebIntegrationEvent(m) + case *InputChange: + return s.pg.InsertWebInputDuration(m) + case *MouseThrashing: + return s.pg.InsertMouseThrashing(m) + case *IOSSessionStart: + return s.pg.InsertIOSSessionStart(m) + case *IOSSessionEnd: + return s.pg.InsertIOSSessionEnd(m) + case *IOSUserID: + return s.pg.InsertIOSUserID(m) + case *IOSUserAnonymousID: + return s.pg.InsertIOSUserAnonymousID(m) + case *IOSCustomEvent: + return s.pg.InsertIOSCustomEvent(m) + case *IOSClickEvent: + return s.pg.InsertIOSClickEvent(m) + case *IOSInputEvent: + return s.pg.InsertIOSInputEvent(m) + case *IOSNetworkCall: + return s.pg.InsertIOSNetworkCall(m) + case *IOSScreenEnter: + return s.pg.InsertIOSScreenEnter(m) + case *IOSCrash: + return s.pg.InsertIOSCrash(m) + } + return nil +} + +func (s *saverImpl) Commit() error { + if s.pg != nil { + s.pg.Commit() + } + if s.ch != nil { + s.ch.Commit() + } + return nil +} + +func (s *saverImpl) Close() error { + if s.pg != nil { + if err := s.pg.Close(); err != nil { + log.Printf("pg.Close error: %s", err) + } + } + if s.ch != nil { + if err := s.ch.Stop(); err != nil { + log.Printf("ch.Close error: %s", err) + } + } + return nil } diff --git a/backend/internal/db/datasaver/stats.go b/backend/internal/db/datasaver/stats.go deleted file mode 100644 index c7daeb3dc..000000000 --- a/backend/internal/db/datasaver/stats.go +++ /dev/null @@ -1,29 +0,0 @@ -package datasaver - -import ( - . "openreplay/backend/pkg/db/types" - . 
"openreplay/backend/pkg/messages" -) - -func (si *Saver) InitStats() { - // noop -} - -func (si *Saver) InsertStats(session *Session, msg Message) error { - switch m := msg.(type) { - // Web - case *PerformanceTrackAggr: - return si.pg.InsertWebStatsPerformance(session.SessionID, m) - case *ResourceEvent: - return si.pg.InsertWebStatsResourceEvent(session.SessionID, m) - } - return nil -} - -func (si *Saver) CommitStats() error { - return nil -} - -func (si *Saver) Close() error { - return nil -} diff --git a/backend/internal/db/service.go b/backend/internal/db/service.go new file mode 100644 index 000000000..69b5cb1cb --- /dev/null +++ b/backend/internal/db/service.go @@ -0,0 +1,56 @@ +package db + +import ( + "log" + "time" + + "openreplay/backend/internal/config/db" + "openreplay/backend/internal/db/datasaver" + "openreplay/backend/internal/service" + "openreplay/backend/pkg/queue/types" +) + +type dbImpl struct { + cfg *db.Config + consumer types.Consumer + saver datasaver.Saver +} + +func New(cfg *db.Config, consumer types.Consumer, saver datasaver.Saver) service.Interface { + s := &dbImpl{ + cfg: cfg, + consumer: consumer, + saver: saver, + } + go s.run() + return s +} + +func (d *dbImpl) run() { + commitTick := time.Tick(d.cfg.CommitBatchTimeout) + for { + select { + case <-commitTick: + d.commit() + case msg := <-d.consumer.Rebalanced(): + log.Println(msg) + default: + if err := d.consumer.ConsumeNext(); err != nil { + log.Fatalf("Error on consumption: %v", err) + } + } + } +} + +func (d *dbImpl) commit() { + d.saver.Commit() + d.consumer.Commit() +} + +func (d *dbImpl) Stop() { + d.commit() + if err := d.saver.Close(); err != nil { + log.Printf("saver.Close error: %s", err) + } + d.consumer.Close() +} diff --git a/backend/internal/heuristics/service.go b/backend/internal/heuristics/service.go new file mode 100644 index 000000000..44b4034e2 --- /dev/null +++ b/backend/internal/heuristics/service.go @@ -0,0 +1,87 @@ +package heuristics + +import ( + "fmt" + "log" + "openreplay/backend/pkg/messages" + metrics "openreplay/backend/pkg/metrics/heuristics" + "time" + + "openreplay/backend/internal/config/heuristics" + "openreplay/backend/internal/service" + "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/sessions" +) + +type heuristicsImpl struct { + cfg *heuristics.Config + producer types.Producer + consumer types.Consumer + events sessions.EventBuilder +} + +func New(cfg *heuristics.Config, p types.Producer, c types.Consumer, e sessions.EventBuilder) service.Interface { + s := &heuristicsImpl{ + cfg: cfg, + producer: p, + consumer: c, + events: e, + } + go s.run() + return s +} + +func (h *heuristicsImpl) run() { + tick := time.Tick(10 * time.Second) + for { + select { + case evt := <-h.events.Events(): + if err := h.producer.Produce(h.cfg.TopicAnalytics, evt.SessionID(), evt.Encode()); err != nil { + log.Printf("can't send new event to queue: %s", err) + } else { + metrics.IncreaseTotalEvents(messageTypeName(evt)) + } + case <-tick: + h.producer.Flush(h.cfg.ProducerTimeout) + h.consumer.Commit() + case msg := <-h.consumer.Rebalanced(): + log.Println(msg) + default: + if err := h.consumer.ConsumeNext(); err != nil { + log.Fatalf("Error on consuming: %v", err) + } + } + } +} + +func (h *heuristicsImpl) Stop() { + // Stop event builder and flush all events + log.Println("stopping heuristics service") + h.events.Stop() + for evt := range h.events.Events() { + if err := h.producer.Produce(h.cfg.TopicAnalytics, evt.SessionID(), evt.Encode()); err != nil { + log.Printf("can't 
send new event to queue: %s", err) + } + } + h.producer.Close(h.cfg.ProducerTimeout) + h.consumer.Commit() + h.consumer.Close() +} + +func messageTypeName(msg messages.Message) string { + switch msg.TypeID() { + case 31: + return "PageEvent" + case 32: + return "InputEvent" + case 56: + return "PerformanceTrackAggr" + case 69: + return "MouseClick" + case 125: + m := msg.(*messages.IssueEvent) + return fmt.Sprintf("IssueEvent(%s)", m.Type) + default: + return "unknown" + } +} diff --git a/backend/internal/service/service.go b/backend/internal/service/service.go new file mode 100644 index 000000000..a20254093 --- /dev/null +++ b/backend/internal/service/service.go @@ -0,0 +1,5 @@ +package service + +type Interface interface { + Stop() +} diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index e1ddb0ffe..26fcf850e 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -9,13 +9,13 @@ import ( ) // EndedSessionHandler handler for ended sessions -type EndedSessionHandler func(sessionID uint64, timestamp int64) bool +type EndedSessionHandler func(sessionID uint64, timestamp uint64) bool // session holds information about user's session live status type session struct { lastTimestamp int64 lastUpdate int64 - lastUserTime int64 + lastUserTime uint64 isEnded bool } diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 1e2507163..b1e6b21fb 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -95,6 +95,7 @@ func (s *Storage) Upload(msg *messages.SessionEnd) (err error) { if err != nil { if strings.Contains(err.Error(), "big file") { log.Printf("%s, sess: %d", err, msg.SessionID()) + metrics.IncreaseStorageTotalSkippedSessions() return nil } return err @@ -110,6 +111,7 @@ func (s *Storage) openSession(filePath string, tp FileType) ([]byte, error) { // Check file size before download into memory info, err := os.Stat(filePath) if err == nil && info.Size() > s.cfg.MaxFileSize { + metrics.RecordSkippedSessionSize(float64(info.Size()), tp.String()) return nil, fmt.Errorf("big file, size: %d", info.Size()) } // Read file into memory diff --git a/backend/pkg/db/cache/messages-common.go b/backend/pkg/db/cache/messages-common.go index 3fc52f395..763f97d90 100644 --- a/backend/pkg/db/cache/messages-common.go +++ b/backend/pkg/db/cache/messages-common.go @@ -21,7 +21,8 @@ func (c *PGCache) HandleSessionEnd(sessionID uint64) error { return nil } -func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { +func (c *PGCache) InsertIssueEvent(crash *IssueEvent) error { + sessionID := crash.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -29,7 +30,8 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error { return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash) } -func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error { +func (c *PGCache) InsertMetadata(metadata *Metadata) error { + sessionID := metadata.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err diff --git a/backend/pkg/db/cache/messages-ios.go b/backend/pkg/db/cache/messages-ios.go index 961b78dad..93367f925 100644 --- a/backend/pkg/db/cache/messages-ios.go +++ b/backend/pkg/db/cache/messages-ios.go @@ -6,7 +6,8 @@ import ( . 
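From messages-common.go onward the PGCache insert methods stop taking a separate sessionID argument: each message already knows its session, so every method now recovers it locally via SessionID() on the message. That removes the possibility of passing a mismatched ID and is what lets saver.Handle above forward bare messages without extra bookkeeping.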
"openreplay/backend/pkg/messages" ) -func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) error { +func (c *PGCache) InsertIOSSessionStart(s *IOSSessionStart) error { + sessionID := s.SessionID() if c.Cache.HasSession(sessionID) { return fmt.Errorf("session %d already in cache", sessionID) } @@ -33,13 +34,15 @@ func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) er return nil } -func (c *PGCache) InsertIOSSessionEnd(sessionID uint64, e *IOSSessionEnd) error { +func (c *PGCache) InsertIOSSessionEnd(e *IOSSessionEnd) error { + sessionID := e.SessionID() _, err := c.InsertSessionEnd(sessionID, e.Timestamp) return err } -func (c *PGCache) InsertIOSScreenEnter(sessionID uint64, screenEnter *IOSScreenEnter) error { - if err := c.Conn.InsertIOSScreenEnter(sessionID, screenEnter); err != nil { +func (c *PGCache) InsertIOSScreenEnter(screenEnter *IOSScreenEnter) error { + sessionID := screenEnter.SessionID() + if err := c.Conn.InsertIOSScreenEnter(screenEnter); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -50,8 +53,9 @@ func (c *PGCache) InsertIOSScreenEnter(sessionID uint64, screenEnter *IOSScreenE return nil } -func (c *PGCache) InsertIOSClickEvent(sessionID uint64, clickEvent *IOSClickEvent) error { - if err := c.Conn.InsertIOSClickEvent(sessionID, clickEvent); err != nil { +func (c *PGCache) InsertIOSClickEvent(clickEvent *IOSClickEvent) error { + sessionID := clickEvent.SessionID() + if err := c.Conn.InsertIOSClickEvent(clickEvent); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -62,8 +66,9 @@ func (c *PGCache) InsertIOSClickEvent(sessionID uint64, clickEvent *IOSClickEven return nil } -func (c *PGCache) InsertIOSInputEvent(sessionID uint64, inputEvent *IOSInputEvent) error { - if err := c.Conn.InsertIOSInputEvent(sessionID, inputEvent); err != nil { +func (c *PGCache) InsertIOSInputEvent(inputEvent *IOSInputEvent) error { + sessionID := inputEvent.SessionID() + if err := c.Conn.InsertIOSInputEvent(inputEvent); err != nil { return err } session, err := c.Cache.GetSession(sessionID) @@ -74,18 +79,15 @@ func (c *PGCache) InsertIOSInputEvent(sessionID uint64, inputEvent *IOSInputEven return nil } -func (c *PGCache) InsertIOSCrash(sessionID uint64, crash *IOSCrash) error { +func (c *PGCache) InsertIOSCrash(crash *IOSCrash) error { + sessionID := crash.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err } - if err := c.Conn.InsertIOSCrash(sessionID, session.ProjectID, crash); err != nil { + if err := c.Conn.InsertIOSCrash(session.ProjectID, crash); err != nil { return err } session.ErrorsCount += 1 return nil } - -func (c *PGCache) InsertIOSIssueEvent(sessionID uint64, issueEvent *IOSIssueEvent) error { - return nil -} diff --git a/backend/pkg/db/cache/messages-web.go b/backend/pkg/db/cache/messages-web.go index 1df3d1520..58c703318 100644 --- a/backend/pkg/db/cache/messages-web.go +++ b/backend/pkg/db/cache/messages-web.go @@ -30,7 +30,8 @@ func (c *PGCache) InsertWebSessionStart(sessionID uint64, s *SessionStart) error }) } -func (c *PGCache) HandleWebSessionStart(sessionID uint64, s *SessionStart) error { +func (c *PGCache) HandleWebSessionStart(s *SessionStart) error { + sessionID := s.SessionID() if c.Cache.HasSession(sessionID) { return fmt.Errorf("session %d already in cache", sessionID) } @@ -69,7 +70,8 @@ func (c *PGCache) InsertWebSessionEnd(sessionID uint64, e *SessionEnd) error { return err } -func (c *PGCache) HandleWebSessionEnd(sessionID 
uint64, e *SessionEnd) error { +func (c *PGCache) HandleWebSessionEnd(e *SessionEnd) error { + sessionID := e.SessionID() return c.HandleSessionEnd(sessionID) } @@ -99,7 +101,8 @@ func (c *PGCache) InsertSessionReferrer(sessionID uint64, referrer string) error return c.Conn.InsertSessionReferrer(sessionID, referrer) } -func (c *PGCache) InsertWebNetworkRequest(sessionID uint64, e *NetworkRequest) error { +func (c *PGCache) InsertWebNetworkRequest(e *NetworkRequest) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -111,7 +114,8 @@ func (c *PGCache) InsertWebNetworkRequest(sessionID uint64, e *NetworkRequest) e return c.Conn.InsertWebNetworkRequest(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } -func (c *PGCache) InsertWebGraphQL(sessionID uint64, e *GraphQL) error { +func (c *PGCache) InsertWebGraphQL(e *GraphQL) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -123,7 +127,8 @@ func (c *PGCache) InsertWebGraphQL(sessionID uint64, e *GraphQL) error { return c.Conn.InsertWebGraphQL(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } -func (c *PGCache) InsertWebCustomEvent(sessionID uint64, e *CustomEvent) error { +func (c *PGCache) InsertWebCustomEvent(e *CustomEvent) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -131,7 +136,8 @@ func (c *PGCache) InsertWebCustomEvent(sessionID uint64, e *CustomEvent) error { return c.Conn.InsertWebCustomEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebUserID(sessionID uint64, userID *UserID) error { +func (c *PGCache) InsertWebUserID(userID *UserID) error { + sessionID := userID.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -139,7 +145,8 @@ func (c *PGCache) InsertWebUserID(sessionID uint64, userID *UserID) error { return c.Conn.InsertWebUserID(sessionID, session.ProjectID, userID) } -func (c *PGCache) InsertWebUserAnonymousID(sessionID uint64, userAnonymousID *UserAnonymousID) error { +func (c *PGCache) InsertWebUserAnonymousID(userAnonymousID *UserAnonymousID) error { + sessionID := userAnonymousID.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -147,7 +154,8 @@ func (c *PGCache) InsertWebUserAnonymousID(sessionID uint64, userAnonymousID *Us return c.Conn.InsertWebUserAnonymousID(sessionID, session.ProjectID, userAnonymousID) } -func (c *PGCache) InsertWebPageEvent(sessionID uint64, e *PageEvent) error { +func (c *PGCache) InsertWebPageEvent(e *PageEvent) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -155,7 +163,8 @@ func (c *PGCache) InsertWebPageEvent(sessionID uint64, e *PageEvent) error { return c.Conn.InsertWebPageEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error { +func (c *PGCache) InsertWebClickEvent(e *MouseClick) error { + sessionID := e.SessionID() session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -163,10 +172,29 @@ func (c *PGCache) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error { return c.Conn.InsertWebClickEvent(sessionID, session.ProjectID, e) } -func (c *PGCache) InsertWebInputEvent(sessionID uint64, e *InputEvent) error { +func (c *PGCache) InsertWebInputEvent(e *InputEvent) error { + sessionID := e.SessionID() session, err := 
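The change repeated through these PGCache handlers is mechanical: drop the sessionID parameter and read it off the message itself. A sketch of the shared prologue every refactored Insert*/Handle* method now performs, with reduced stand-ins for the real cache and message types (illustrative only):

package sketch

import "fmt"

// Minimal stand-ins for the real message and session-cache types.
type Message interface{ SessionID() uint64 }

type Session struct{ ProjectID uint32 }

type SessionCache interface {
    GetSession(sessionID uint64) (*Session, error)
}

// withSession captures the common shape: derive the session ID from the
// message, resolve the session, then run the actual insert with both in hand.
func withSession(cache SessionCache, m Message, insert func(sessionID uint64, projectID uint32) error) error {
    sessionID := m.SessionID()
    session, err := cache.GetSession(sessionID)
    if err != nil {
        return fmt.Errorf("session %d: %w", sessionID, err)
    }
    return insert(sessionID, session.ProjectID)
}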
c.Cache.GetSession(sessionID) if err != nil { return err } return c.Conn.InsertWebInputEvent(sessionID, session.ProjectID, e) } + +func (c *PGCache) InsertWebInputDuration(e *InputChange) error { + sessionID := e.SessionID() + session, err := c.Cache.GetSession(sessionID) + if err != nil { + return err + } + return c.Conn.InsertWebInputDuration(sessionID, session.ProjectID, e) +} + +func (c *PGCache) InsertMouseThrashing(e *MouseThrashing) error { + sessionID := e.SessionID() + session, err := c.Cache.GetSession(sessionID) + if err != nil { + return err + } + return c.Conn.InsertMouseThrashing(sessionID, session.ProjectID, e) +} diff --git a/backend/pkg/db/clickhouse/connector.go b/backend/pkg/db/clickhouse/connector.go new file mode 100644 index 000000000..1d3a3b4f5 --- /dev/null +++ b/backend/pkg/db/clickhouse/connector.go @@ -0,0 +1,24 @@ +package clickhouse + +import ( + "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/messages" +) + +type Connector interface { + Prepare() error + Commit() error + Stop() error + InsertWebSession(session *types.Session) error + InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error + InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error + InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error + InsertWebInputEvent(session *types.Session, msg *messages.InputEvent) error + InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error + InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error + InsertAutocomplete(session *types.Session, msgType, msgValue string) error + InsertRequest(session *types.Session, msg *messages.NetworkRequest, savePayload bool) error + InsertCustom(session *types.Session, msg *messages.CustomEvent) error + InsertGraphQL(session *types.Session, msg *messages.GraphQL) error + InsertIssue(session *types.Session, msg *messages.IssueEvent) error +} diff --git a/backend/pkg/db/postgres/batches.go b/backend/pkg/db/postgres/batches.go index 8b9f2484d..bf4d1745c 100644 --- a/backend/pkg/db/postgres/batches.go +++ b/backend/pkg/db/postgres/batches.go @@ -193,9 +193,7 @@ func (conn *BatchSet) worker() { for { select { case t := <-conn.workerTask: - start := time.Now() conn.sendBatches(t) - log.Printf("pg batches dur: %d", time.Now().Sub(start).Milliseconds()) case <-conn.done: if len(conn.workerTask) > 0 { for t := range conn.workerTask { diff --git a/backend/pkg/db/postgres/bulks.go b/backend/pkg/db/postgres/bulks.go index f3e9e95c9..7eaba41b4 100644 --- a/backend/pkg/db/postgres/bulks.go +++ b/backend/pkg/db/postgres/bulks.go @@ -2,7 +2,6 @@ package postgres import ( "log" - "time" ) type bulksTask struct { @@ -10,7 +9,7 @@ type bulksTask struct { } func NewBulksTask() *bulksTask { - return &bulksTask{bulks: make([]Bulk, 0, 14)} + return &bulksTask{bulks: make([]Bulk, 0, 15)} } type BulkSet struct { @@ -20,6 +19,7 @@ type BulkSet struct { customEvents Bulk webPageEvents Bulk webInputEvents Bulk + webInputDurations Bulk webGraphQL Bulk webErrors Bulk webErrorEvents Bulk @@ -58,6 +58,8 @@ func (conn *BulkSet) Get(name string) Bulk { return conn.webPageEvents case "webInputEvents": return conn.webInputEvents + case "webInputDurations": + return conn.webInputDurations case "webGraphQL": return conn.webGraphQL case "webErrors": @@ -127,6 +129,14 @@ func (conn *BulkSet) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } + conn.webInputDurations, err = NewBulk(conn.c, + 
"events.inputs", + "(session_id, message_id, timestamp, value, label, hesitation, duration)", + "($%d, $%d, $%d, LEFT($%d, 2000), NULLIF(LEFT($%d, 2000),''), $%d, $%d)", + 7, 200) + if err != nil { + log.Fatalf("can't create webPageEvents bulk: %s", err) + } conn.webGraphQL, err = NewBulk(conn.c, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", @@ -185,9 +195,9 @@ func (conn *BulkSet) initBulks() { } conn.webClickEvents, err = NewBulk(conn.c, "events.clicks", - "(session_id, message_id, timestamp, label, selector, url, path)", - "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000))", - 7, 200) + "(session_id, message_id, timestamp, label, selector, url, path, hesitation)", + "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000), $%d)", + 8, 200) if err != nil { log.Fatalf("can't create webClickEvents bulk: %s", err) } @@ -210,6 +220,7 @@ func (conn *BulkSet) Send() { newTask.bulks = append(newTask.bulks, conn.customEvents) newTask.bulks = append(newTask.bulks, conn.webPageEvents) newTask.bulks = append(newTask.bulks, conn.webInputEvents) + newTask.bulks = append(newTask.bulks, conn.webInputDurations) newTask.bulks = append(newTask.bulks, conn.webGraphQL) newTask.bulks = append(newTask.bulks, conn.webErrors) newTask.bulks = append(newTask.bulks, conn.webErrorEvents) @@ -243,9 +254,7 @@ func (conn *BulkSet) worker() { for { select { case t := <-conn.workerTask: - start := time.Now() conn.sendBulks(t) - log.Printf("pg bulks dur: %d", time.Now().Sub(start).Milliseconds()) case <-conn.done: if len(conn.workerTask) > 0 { for t := range conn.workerTask { diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 6904dc135..be748e6a2 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -17,7 +17,7 @@ type Conn struct { c Pool batches *BatchSet bulks *BulkSet - chConn CH + chConn CH // hack for autocomplete inserts, TODO: rewrite } func (conn *Conn) SetClickHouse(ch CH) { diff --git a/backend/pkg/db/postgres/messages-ios.go b/backend/pkg/db/postgres/messages-ios.go index 027cfc968..ace1955f5 100644 --- a/backend/pkg/db/postgres/messages-ios.go +++ b/backend/pkg/db/postgres/messages-ios.go @@ -6,7 +6,8 @@ import ( "openreplay/backend/pkg/url" ) -func (conn *Conn) InsertIOSCustomEvent(sessionID uint64, e *messages.IOSCustomEvent) error { +func (conn *Conn) InsertIOSCustomEvent(e *messages.IOSCustomEvent) error { + sessionID := e.SessionID() err := conn.InsertCustomEvent(sessionID, e.Timestamp, truncSqIdx(e.Index), e.Name, e.Payload) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "CUSTOM_IOS", e.Name) @@ -14,7 +15,8 @@ func (conn *Conn) InsertIOSCustomEvent(sessionID uint64, e *messages.IOSCustomEv return err } -func (conn *Conn) InsertIOSUserID(sessionID uint64, userID *messages.IOSUserID) error { +func (conn *Conn) InsertIOSUserID(userID *messages.IOSUserID) error { + sessionID := userID.SessionID() err := conn.InsertUserID(sessionID, userID.Value) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "USERID_IOS", userID.Value) @@ -22,7 +24,8 @@ func (conn *Conn) InsertIOSUserID(sessionID uint64, userID *messages.IOSUserID) return err } -func (conn *Conn) InsertIOSUserAnonymousID(sessionID uint64, userAnonymousID *messages.IOSUserAnonymousID) error { +func (conn *Conn) InsertIOSUserAnonymousID(userAnonymousID *messages.IOSUserAnonymousID) error { + sessionID := 
userAnonymousID.SessionID() err := conn.InsertUserAnonymousID(sessionID, userAnonymousID.Value) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "USERANONYMOUSID_IOS", userAnonymousID.Value) @@ -30,7 +33,8 @@ func (conn *Conn) InsertIOSUserAnonymousID(sessionID uint64, userAnonymousID *me return err } -func (conn *Conn) InsertIOSNetworkCall(sessionID uint64, e *messages.IOSNetworkCall) error { +func (conn *Conn) InsertIOSNetworkCall(e *messages.IOSNetworkCall) error { + sessionID := e.SessionID() err := conn.InsertRequest(sessionID, e.Timestamp, truncSqIdx(e.Index), e.URL, e.Duration, e.Success) if err == nil { conn.insertAutocompleteValue(sessionID, 0, "REQUEST_IOS", url.DiscardURLQuery(e.URL)) @@ -38,7 +42,8 @@ func (conn *Conn) InsertIOSNetworkCall(sessionID uint64, e *messages.IOSNetworkC return err } -func (conn *Conn) InsertIOSScreenEnter(sessionID uint64, screenEnter *messages.IOSScreenEnter) error { +func (conn *Conn) InsertIOSScreenEnter(screenEnter *messages.IOSScreenEnter) error { + sessionID := screenEnter.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -69,7 +74,8 @@ func (conn *Conn) InsertIOSScreenEnter(sessionID uint64, screenEnter *messages.I return nil } -func (conn *Conn) InsertIOSClickEvent(sessionID uint64, clickEvent *messages.IOSClickEvent) error { +func (conn *Conn) InsertIOSClickEvent(clickEvent *messages.IOSClickEvent) error { + sessionID := clickEvent.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -100,7 +106,8 @@ func (conn *Conn) InsertIOSClickEvent(sessionID uint64, clickEvent *messages.IOS return nil } -func (conn *Conn) InsertIOSInputEvent(sessionID uint64, inputEvent *messages.IOSInputEvent) error { +func (conn *Conn) InsertIOSInputEvent(inputEvent *messages.IOSInputEvent) error { + sessionID := inputEvent.SessionID() tx, err := conn.c.Begin() if err != nil { return err @@ -137,7 +144,8 @@ func (conn *Conn) InsertIOSInputEvent(sessionID uint64, inputEvent *messages.IOS return nil } -func (conn *Conn) InsertIOSCrash(sessionID uint64, projectID uint32, crash *messages.IOSCrash) error { +func (conn *Conn) InsertIOSCrash(projectID uint32, crash *messages.IOSCrash) error { + sessionID := crash.SessionID() tx, err := conn.c.Begin() if err != nil { return err diff --git a/backend/pkg/db/postgres/messages-web-stats.go b/backend/pkg/db/postgres/messages-web-stats.go index 42458a497..47bd06974 100644 --- a/backend/pkg/db/postgres/messages-web-stats.go +++ b/backend/pkg/db/postgres/messages-web-stats.go @@ -5,7 +5,8 @@ import ( "openreplay/backend/pkg/url" ) -func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrackAggr) error { +func (conn *Conn) InsertWebStatsPerformance(p *PerformanceTrackAggr) error { + sessionID := p.SessionID() timestamp := (p.TimestampEnd + p.TimestampStart) / 2 sqlRequest := ` @@ -35,40 +36,37 @@ func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrac return nil } -func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent) error { +func (conn *Conn) InsertWebStatsResourceEvent(e *ResourceTiming) error { + sessionID := e.SessionID() host, _, _, err := url.GetURLParts(e.URL) if err != nil { return err } - + msgType := url.GetResourceType(e.Initiator, e.URL) sqlRequest := ` INSERT INTO events.resources ( session_id, timestamp, message_id, type, url, url_host, url_hostpath, success, status, - method, duration, ttfb, header_size, encoded_body_size, decoded_body_size ) VALUES ( $1, $2, $3, $4, LEFT($5, 8000), LEFT($6, 300), 
LEFT($7, 2000), $8, $9, - NULLIF($10, '')::events.resource_method, - NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0), NULLIF($15, 0) + NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0) )` urlQuery := url.DiscardURLQuery(e.URL) - urlMethod := url.EnsureMethod(e.Method) conn.batchQueue(sessionID, sqlRequest, - sessionID, e.Timestamp, truncSqIdx(e.MessageID), - e.Type, + sessionID, e.Timestamp, truncSqIdx(e.MsgID()), + msgType, e.URL, host, urlQuery, - e.Success, e.Status, - urlMethod, + e.Duration != 0, 0, e.Duration, e.TTFB, e.HeaderSize, e.EncodedBodySize, e.DecodedBodySize, ) // Record approximate message size - conn.updateBatchSize(sessionID, len(sqlRequest)+len(e.Type)+len(e.URL)+len(host)+len(urlQuery)+len(urlMethod)+8*9+1) + conn.updateBatchSize(sessionID, len(sqlRequest)+len(msgType)+len(e.URL)+len(host)+len(urlQuery)+8*9+1) return nil } diff --git a/backend/pkg/db/postgres/messages-web.go b/backend/pkg/db/postgres/messages-web.go index 08db4491e..6b39f0eca 100644 --- a/backend/pkg/db/postgres/messages-web.go +++ b/backend/pkg/db/postgres/messages-web.go @@ -2,8 +2,8 @@ package postgres import ( "log" - "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/hashid" . "openreplay/backend/pkg/messages" "openreplay/backend/pkg/url" ) @@ -57,10 +57,13 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, projectID uint32, e *Page return nil } -func (conn *Conn) InsertWebClickEvent(sessionID uint64, projectID uint32, e *ClickEvent) error { +func (conn *Conn) InsertWebClickEvent(sessionID uint64, projectID uint32, e *MouseClick) error { + if e.Label == "" { + return nil + } var host, path string host, path, _, _ = url.GetURLParts(e.Url) - if err := conn.bulks.Get("webClickEvents").Append(sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.Label, e.Selector, host+path, path); err != nil { + if err := conn.bulks.Get("webClickEvents").Append(sessionID, truncSqIdx(e.MsgID()), e.Timestamp, e.Label, e.Selector, host+path, path, e.HesitationTime); err != nil { log.Printf("insert web click err: %s", err) } // Accumulate session updates and exec inside batch with another sql commands @@ -86,6 +89,22 @@ func (conn *Conn) InsertWebInputEvent(sessionID uint64, projectID uint32, e *Inp return nil } +func (conn *Conn) InsertWebInputDuration(sessionID uint64, projectID uint32, e *InputChange) error { + if e.Label == "" { + return nil + } + value := &e.Value + if e.ValueMasked { + value = nil + } + if err := conn.bulks.Get("webInputDurations").Append(sessionID, truncSqIdx(e.ID), e.Timestamp, value, e.Label, e.HesitationTime, e.InputDuration); err != nil { + log.Printf("insert web input event err: %s", err) + } + conn.updateSessionEvents(sessionID, 1, 0) + conn.insertAutocompleteValue(sessionID, projectID, "INPUT", e.Label) + return nil +} + func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *types.ErrorEvent) error { errorID := e.ID(projectID) if err := conn.bulks.Get("webErrors").Append(errorID, projectID, e.Source, e.Name, e.Message, e.Payload); err != nil { @@ -142,3 +161,15 @@ func (conn *Conn) InsertSessionReferrer(sessionID uint64, referrer string) error WHERE session_id = $3 AND referrer IS NULL`, referrer, url.DiscardURLQuery(referrer), sessionID) } + +func (conn *Conn) InsertMouseThrashing(sessionID uint64, projectID uint32, e *MouseThrashing) error { + issueID := hashid.MouseThrashingID(projectID, sessionID, e.Timestamp) + if err := conn.bulks.Get("webIssues").Append(projectID, issueID, "mouse_thrashing", e.Url); err != 
nil { + log.Printf("insert web issue err: %s", err) + } + if err := conn.bulks.Get("webIssueEvents").Append(sessionID, issueID, e.Timestamp, truncSqIdx(e.MsgID()), nil); err != nil { + log.Printf("insert web issue event err: %s", err) + } + conn.updateSessionIssues(sessionID, 0, 50) + return nil +} diff --git a/backend/pkg/db/types/error-event.go b/backend/pkg/db/types/error-event.go index bef9abd99..9f2f1a886 100644 --- a/backend/pkg/db/types/error-event.go +++ b/backend/pkg/db/types/error-event.go @@ -120,3 +120,15 @@ func (e *ErrorEvent) ID(projectID uint32) string { } return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil)) } + +func WrapCustomEvent(m *CustomEvent) *IssueEvent { + msg := &IssueEvent{ + Type: "custom", + Timestamp: m.Time(), + MessageID: m.MsgID(), + ContextString: m.Name, + Payload: m.Payload, + } + msg.Meta().SetMeta(m.Meta()) + return msg +} diff --git a/backend/pkg/handlers/custom/eventMapper.go b/backend/pkg/handlers/custom/eventMapper.go deleted file mode 100644 index a85ebbdf0..000000000 --- a/backend/pkg/handlers/custom/eventMapper.go +++ /dev/null @@ -1,82 +0,0 @@ -package custom - -import ( - "net/url" - "strings" - - . "openreplay/backend/pkg/messages" -) - -func getURLExtention(URL string) string { - u, err := url.Parse(URL) - if err != nil { - return "" - } - i := strings.LastIndex(u.Path, ".") - return u.Path[i+1:] -} - -func getResourceType(initiator string, URL string) string { - switch initiator { - case "xmlhttprequest", "fetch": - return "fetch" - case "img": - return "img" - default: - switch getURLExtention(URL) { - case "css": - return "stylesheet" - case "js": - return "script" - case "png", "gif", "jpg", "jpeg", "svg": - return "img" - case "mp4", "mkv", "ogg", "webm", "avi", "mp3": - return "media" - default: - return "other" - } - } -} - -type EventMapper struct{} - -func (b *EventMapper) Build() Message { - return nil -} - -func (b *EventMapper) Handle(message Message, messageID uint64, timestamp uint64) Message { - switch msg := message.(type) { - case *MouseClick: - if msg.Label != "" { - return &ClickEvent{ - MessageID: messageID, - Label: msg.Label, - HesitationTime: msg.HesitationTime, - Timestamp: timestamp, - Selector: msg.Selector, - } - } - case *ResourceTiming: - return &ResourceEvent{ - MessageID: messageID, - Timestamp: msg.Timestamp, - Duration: msg.Duration, - TTFB: msg.TTFB, - HeaderSize: msg.HeaderSize, - EncodedBodySize: msg.EncodedBodySize, - DecodedBodySize: msg.DecodedBodySize, - URL: msg.URL, - Type: getResourceType(msg.Initiator, msg.URL), - Success: msg.Duration != 0, - } - case *CustomIssue: - return &IssueEvent{ - Type: "custom", - Timestamp: timestamp, - MessageID: messageID, - ContextString: msg.Name, - Payload: msg.Payload, - } - } - return nil -} diff --git a/backend/pkg/handlers/custom/inputEventBuilder.go b/backend/pkg/handlers/custom/inputEventBuilder.go index e07470f37..d057db3e3 100644 --- a/backend/pkg/handlers/custom/inputEventBuilder.go +++ b/backend/pkg/handlers/custom/inputEventBuilder.go @@ -4,7 +4,7 @@ import ( . 
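A usage sketch for the new types.WrapCustomEvent, which takes over the CustomIssue branch of the deleted EventMapper; it assumes the types package dot-imports pkg/messages, as the surrounding code suggests:

package sketch

import (
    "openreplay/backend/pkg/db/types"
    "openreplay/backend/pkg/messages"
)

// toIssue re-emits a custom event as a "custom" IssueEvent; the meta
// (session, index, timestamp) is copied over by WrapCustomEvent itself.
func toIssue(m messages.Message) messages.Message {
    if custom, ok := m.(*messages.CustomEvent); ok {
        return types.WrapCustomEvent(custom)
    }
    return m
}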
"openreplay/backend/pkg/messages" ) -const INPUT_EVENT_TIMEOUT = 1 * 60 * 1000 +const InputEventTimeout = 1 * 60 * 1000 type inputLabels map[uint64]string @@ -24,7 +24,7 @@ func (b *inputEventBuilder) clearLabels() { b.inputLabels = make(inputLabels) } -func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (b *inputEventBuilder) Handle(message Message, timestamp uint64) Message { var inputEvent Message = nil switch msg := message.(type) { case *SetInputTarget: @@ -41,7 +41,7 @@ func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp } if b.inputEvent == nil { b.inputEvent = &InputEvent{ - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, Value: msg.Value, ValueMasked: msg.Mask > 0, @@ -59,7 +59,7 @@ func (b *inputEventBuilder) Handle(message Message, messageID uint64, timestamp return b.Build() } - if b.inputEvent != nil && b.inputEvent.Timestamp+INPUT_EVENT_TIMEOUT < timestamp { + if b.inputEvent != nil && b.inputEvent.Timestamp+InputEventTimeout < timestamp { return b.Build() } return nil diff --git a/backend/pkg/handlers/custom/pageEventBuilder.go b/backend/pkg/handlers/custom/pageEventBuilder.go index d95768983..5bab7d4cc 100644 --- a/backend/pkg/handlers/custom/pageEventBuilder.go +++ b/backend/pkg/handlers/custom/pageEventBuilder.go @@ -4,7 +4,7 @@ import ( . "openreplay/backend/pkg/messages" ) -const PAGE_EVENT_TIMEOUT = 1 * 60 * 1000 +const PageEventTimeout = 1 * 60 * 1000 type pageEventBuilder struct { pageEvent *PageEvent @@ -16,7 +16,7 @@ func NewPageEventBuilder() *pageEventBuilder { return ieBuilder } -func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (b *pageEventBuilder) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *SetPageLocation: if msg.NavigationStart == 0 { // routing without new page loading @@ -24,7 +24,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u URL: msg.URL, Referrer: msg.Referrer, Loaded: false, - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, } } else { @@ -33,7 +33,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u URL: msg.URL, Referrer: msg.Referrer, Loaded: true, - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: timestamp, } return pageEvent @@ -81,7 +81,7 @@ func (b *pageEventBuilder) Handle(message Message, messageID uint64, timestamp u } - if b.pageEvent != nil && b.pageEvent.Timestamp+PAGE_EVENT_TIMEOUT < timestamp { + if b.pageEvent != nil && b.pageEvent.Timestamp+PageEventTimeout < timestamp { return b.Build() } return nil diff --git a/backend/pkg/handlers/ios/clickRage.go b/backend/pkg/handlers/ios/clickRage.go index 84c130dae..91283de90 100644 --- a/backend/pkg/handlers/ios/clickRage.go +++ b/backend/pkg/handlers/ios/clickRage.go @@ -48,7 +48,7 @@ func (h *ClickRageDetector) Handle(message Message, messageID uint64, timestamp } func (h *ClickRageDetector) Build() Message { - if h.countsInARow >= web.MIN_CLICKS_IN_A_ROW { + if h.countsInARow >= web.MinClicksInARow { event := &IOSIssueEvent{ Type: "click_rage", ContextString: h.lastLabel, diff --git a/backend/pkg/handlers/messageProcessor.go b/backend/pkg/handlers/messageProcessor.go index c4235c18b..38d02ab1a 100644 --- a/backend/pkg/handlers/messageProcessor.go +++ b/backend/pkg/handlers/messageProcessor.go @@ -6,6 +6,6 @@ import . 
"openreplay/backend/pkg/messages" // U can create your own message handler and easily connect to heuristics service type MessageProcessor interface { - Handle(message Message, messageID uint64, timestamp uint64) Message + Handle(message Message, timestamp uint64) Message Build() Message } diff --git a/backend/pkg/handlers/web/clickRage.go b/backend/pkg/handlers/web/clickRage.go index 6974ee1b0..56692765e 100644 --- a/backend/pkg/handlers/web/clickRage.go +++ b/backend/pkg/handlers/web/clickRage.go @@ -7,14 +7,8 @@ import ( . "openreplay/backend/pkg/messages" ) -/* - Handler name: ClickRage - Input event: MouseClick - Output event: IssueEvent -*/ - -const MAX_TIME_DIFF = 300 -const MIN_CLICKS_IN_A_ROW = 3 +const MaxTimeDiff = 300 +const MinClicksInARow = 3 type ClickRageDetector struct { lastTimestamp uint64 @@ -34,46 +28,54 @@ func (crd *ClickRageDetector) reset() { crd.url = "" } -func (crd *ClickRageDetector) Build() Message { - defer crd.reset() - if crd.countsInARow >= MIN_CLICKS_IN_A_ROW { - payload, err := json.Marshal(struct{ Count int }{crd.countsInARow}) - if err != nil { - log.Printf("can't marshal ClickRage payload to json: %s", err) - } - event := &IssueEvent{ - Type: "click_rage", - ContextString: crd.lastLabel, - Payload: string(payload), - Timestamp: crd.firstInARawTimestamp, - MessageID: crd.firstInARawMessageId, - URL: crd.url, - } - return event +func (crd *ClickRageDetector) createPayload() string { + p, err := json.Marshal(struct{ Count int }{crd.countsInARow}) + if err != nil { + log.Printf("can't marshal ClickRage payload to json: %s", err) + return "" } - return nil + return string(p) } -func (crd *ClickRageDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (crd *ClickRageDetector) Build() Message { + defer crd.reset() + if crd.countsInARow < MinClicksInARow { + return nil + } + return &IssueEvent{ + Type: "click_rage", + ContextString: crd.lastLabel, + Payload: crd.createPayload(), + Timestamp: crd.firstInARawTimestamp, + MessageID: crd.firstInARawMessageId, + URL: crd.url, + } +} + +func (crd *ClickRageDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *MouseClick: + // Set click url if crd.url == "" && msg.Url != "" { crd.url = msg.Url } - // TODO: check if we it is ok to capture clickRage event without the connected ClickEvent in db. 
+ // Click on different object -> build if we can and reset the builder if msg.Label == "" { return crd.Build() } - if crd.lastLabel == msg.Label && timestamp-crd.lastTimestamp < MAX_TIME_DIFF { + // Update builder with last information + if crd.lastLabel == msg.Label && timestamp-crd.lastTimestamp < MaxTimeDiff { crd.lastTimestamp = timestamp crd.countsInARow += 1 return nil } + // Try to build event event := crd.Build() + // Use current message as init values for new event crd.lastTimestamp = timestamp crd.lastLabel = msg.Label crd.firstInARawTimestamp = timestamp - crd.firstInARawMessageId = messageID + crd.firstInARawMessageId = message.MsgID() crd.countsInARow = 1 if crd.url == "" && msg.Url != "" { crd.url = msg.Url diff --git a/backend/pkg/handlers/web/cpuIssue.go b/backend/pkg/handlers/web/cpuIssue.go index 56f483e8b..74117fc1f 100644 --- a/backend/pkg/handlers/web/cpuIssue.go +++ b/backend/pkg/handlers/web/cpuIssue.go @@ -15,8 +15,8 @@ import ( Output event: IssueEvent */ -const CPU_THRESHOLD = 70 // % out of 100 -const CPU_MIN_DURATION_TRIGGER = 6 * 1000 +const CpuThreshold = 70 // % out of 100 +const CpuMinDurationTrigger = 6 * 1000 type CpuIssueDetector struct { startTimestamp uint64 @@ -26,65 +26,61 @@ type CpuIssueDetector struct { contextString string } -func (f *CpuIssueDetector) Build() Message { - if f.startTimestamp == 0 { - return nil - } - duration := f.lastTimestamp - f.startTimestamp - timestamp := f.startTimestamp - messageID := f.startMessageID - maxRate := f.maxRate - - f.startTimestamp = 0 - f.startMessageID = 0 - f.maxRate = 0 - if duration < CPU_MIN_DURATION_TRIGGER { - return nil - } - - payload, err := json.Marshal(struct { +func (f *CpuIssueDetector) createPayload() string { + p, err := json.Marshal(struct { Duration uint64 Rate uint64 - }{duration, maxRate}) + }{f.duration(), f.maxRate}) if err != nil { log.Printf("can't marshal CpuIssue payload to json: %s", err) } + return string(p) +} +func (f *CpuIssueDetector) duration() uint64 { + return f.lastTimestamp - f.startTimestamp +} + +func (f *CpuIssueDetector) reset() { + f.startTimestamp = 0 + f.startMessageID = 0 + f.maxRate = 0 +} + +func (f *CpuIssueDetector) Build() Message { + defer f.reset() + if f.startTimestamp == 0 || f.duration() < CpuMinDurationTrigger { + return nil + } return &IssueEvent{ Type: "cpu", - Timestamp: timestamp, - MessageID: messageID, + Timestamp: f.startTimestamp, + MessageID: f.startMessageID, ContextString: f.contextString, - Payload: string(payload), + Payload: f.createPayload(), } } -func (f *CpuIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *CpuIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: - dt := performance.TimeDiff(timestamp, f.lastTimestamp) - if dt == 0 { - return nil // TODO: handle error + // Ignore the message if it arrives out of order + if timestamp < f.lastTimestamp { + return nil } - f.lastTimestamp = timestamp - - if msg.Frames == -1 || msg.Ticks == -1 { + cpuRate := performance.CPURate(msg.Ticks, performance.TimeDiff(timestamp, f.lastTimestamp)) + f.lastTimestamp = timestamp + // Build the event if the cpu issue has gone + if msg.Frames == -1 || msg.Ticks == -1 || cpuRate < CpuThreshold { return f.Build() } - - cpuRate := performance.CPURate(msg.Ticks, dt) - - if cpuRate >= CPU_THRESHOLD { - if f.startTimestamp == 0 { - f.startTimestamp = timestamp - f.startMessageID = messageID - } - if f.maxRate < cpuRate { - f.maxRate = cpuRate - } - } else { - return f.Build() + // Update values + if f.startTimestamp == 0 { + f.startTimestamp = timestamp + f.startMessageID = message.MsgID() + } + if f.maxRate < cpuRate { + f.maxRate = cpuRate } case *SetPageLocation: f.contextString = msg.URL diff --git a/backend/pkg/handlers/web/deadClick.go b/backend/pkg/handlers/web/deadClick.go index 434e6c1ce..5b02dc498 100644 --- a/backend/pkg/handlers/web/deadClick.go +++ b/backend/pkg/handlers/web/deadClick.go @@ -4,43 +4,39 @@ import ( . "openreplay/backend/pkg/messages" ) -/* - Handler name: DeadClick - Input events: SetInputTarget, - CreateDocument, - MouseClick, - SetNodeAttribute, - RemoveNodeAttribute, - CreateElementNode, - CreateTextNode, - MoveNode, - RemoveNode, - SetCSSData, - CSSInsertRule, - CSSDeleteRule - Output event: IssueEvent -*/ - -const CLICK_RELATION_TIME = 1234 +const ClickRelationTime = 1234 type DeadClickDetector struct { - lastTimestamp uint64 lastMouseClick *MouseClick + lastTimestamp uint64 lastClickTimestamp uint64 lastMessageID uint64 inputIDSet map[uint64]bool } +func NewDeadClickDetector() *DeadClickDetector { + return &DeadClickDetector{inputIDSet: make(map[uint64]bool)} +} + +func (d *DeadClickDetector) addInputID(id uint64) { + d.inputIDSet[id] = true +} + +func (d *DeadClickDetector) clearInputIDs() { + d.inputIDSet = make(map[uint64]bool) +} + func (d *DeadClickDetector) reset() { - d.inputIDSet = nil d.lastMouseClick = nil d.lastClickTimestamp = 0 d.lastMessageID = 0 + d.clearInputIDs() } -func (d *DeadClickDetector) build(timestamp uint64) Message { +func (d *DeadClickDetector) Build() Message { + // reset detector state after every Build call defer d.reset() - if d.lastMouseClick == nil || d.lastClickTimestamp+CLICK_RELATION_TIME > timestamp { // reaction is instant + if d.lastMouseClick == nil || d.lastClickTimestamp+ClickRelationTime > d.lastTimestamp { // reaction is instant return nil } event := &IssueEvent{ @@ -52,42 +48,37 @@ func (d *DeadClickDetector) build(timestamp uint64) Message { return event } -func (d *DeadClickDetector) Build() Message { - return d.build(d.lastTimestamp) -} - -func (d *DeadClickDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (d *DeadClickDetector) Handle(message Message, timestamp uint64) Message { d.lastTimestamp = timestamp switch msg := message.(type) { case *SetInputTarget: - if d.inputIDSet == nil { - d.inputIDSet = make(map[uint64]bool) - } - d.inputIDSet[msg.ID] = true + d.addInputID(msg.ID) case *CreateDocument: - d.inputIDSet = nil + d.clearInputIDs() case *MouseClick: if msg.Label == "" { return nil } - event := d.build(timestamp) - if d.inputIDSet[msg.ID] { // ignore if input + isInputEvent := d.inputIDSet[msg.ID] + event := d.Build() + if isInputEvent { return event } d.lastMouseClick = msg d.lastClickTimestamp = timestamp - d.lastMessageID = messageID + d.lastMessageID = message.MsgID() return event case *SetNodeAttribute, *RemoveNodeAttribute, *CreateElementNode, *CreateTextNode, + *SetNodeFocus, *MoveNode, *RemoveNode, *SetCSSData, *CSSInsertRule, *CSSDeleteRule: - return d.build(timestamp) + return d.Build() } return nil } diff --git a/backend/pkg/handlers/web/domDrop.go b/backend/pkg/handlers/web/domDrop.go deleted file mode 100644 index 4a3ec2065..000000000 --- a/backend/pkg/handlers/web/domDrop.go +++ /dev/null @@ -1,55 +0,0 @@ -package web - -import ( - .
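The dead-click rule above compresses to one comparison; a sketch of that condition, using ClickRelationTime from the diff (simplified: the real detector also tracks which DOM-mutation message types count as a reaction):

package sketch

const clickRelationTime = 1234 // ms, as in web.ClickRelationTime

// isDeadClick mirrors Build(): a labeled click is "dead" when the latest
// observed timestamp is already past the reaction window and the click did
// not land on a registered input target.
func isDeadClick(clickTs, lastSeenTs uint64, onInputTarget bool) bool {
    if onInputTarget {
        return false // inputs may react without any DOM mutation
    }
    return clickTs+clickRelationTime <= lastSeenTs
}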
"openreplay/backend/pkg/messages" -) - -/* - Handler name: DomDrop - Input events: CreateElementNode, - CreateTextNode, - RemoveNode - Output event: DOMDrop -*/ - -const DROP_WINDOW = 200 //ms -const CRITICAL_COUNT = 1 // Our login page contains 20. But on crush it removes only roots (1-3 nodes). -// TODO: smart detection (making whole DOM tree would eat all memory) - -type domDropDetector struct { - removedCount int - lastDropTimestamp uint64 -} - -func (dd *domDropDetector) reset() { - dd.removedCount = 0 - dd.lastDropTimestamp = 0 -} - -func (dd *domDropDetector) Handle(message Message, _ uint64, timestamp uint64) Message { - switch message.(type) { - case *CreateElementNode, - *CreateTextNode: - dd.removedCount = 0 - dd.lastDropTimestamp = 0 - case *RemoveNode: - if dd.lastDropTimestamp+DROP_WINDOW > timestamp { - dd.removedCount += 1 - } else { - dd.removedCount = 1 - } - dd.lastDropTimestamp = timestamp - } - return nil -} - -func (dd *domDropDetector) Build() Message { - defer dd.reset() - if dd.removedCount >= CRITICAL_COUNT { - domDrop := &DOMDrop{ - Timestamp: dd.lastDropTimestamp, - } - return domDrop - } - return nil -} diff --git a/backend/pkg/handlers/web/memoryIssue.go b/backend/pkg/handlers/web/memoryIssue.go index 487c396a9..4b3022d74 100644 --- a/backend/pkg/handlers/web/memoryIssue.go +++ b/backend/pkg/handlers/web/memoryIssue.go @@ -8,13 +8,6 @@ import ( . "openreplay/backend/pkg/messages" ) -/* - Handler name: MemoryIssue - Input events: PerformanceTrack, - SetPageLocation - Output event: IssueEvent -*/ - const MIN_COUNT = 3 const MEM_RATE_THRESHOLD = 300 // % to average @@ -52,7 +45,7 @@ func (f *MemoryIssueDetector) Build() Message { return event } -func (f *MemoryIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *MemoryIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: if f.count < MIN_COUNT { @@ -70,7 +63,7 @@ func (f *MemoryIssueDetector) Handle(message Message, messageID uint64, timestam if rate >= MEM_RATE_THRESHOLD { if f.startTimestamp == 0 { f.startTimestamp = timestamp - f.startMessageID = messageID + f.startMessageID = message.MsgID() } if f.rate < rate { f.rate = rate diff --git a/backend/pkg/handlers/web/networkIssue.go b/backend/pkg/handlers/web/networkIssue.go index 20ef412dd..9dd1f7a04 100644 --- a/backend/pkg/handlers/web/networkIssue.go +++ b/backend/pkg/handlers/web/networkIssue.go @@ -4,26 +4,19 @@ import ( . 
"openreplay/backend/pkg/messages" ) -/* - Handler name: NetworkIssue - Input events: ResourceTiming, - NetworkRequest - Output event: IssueEvent -*/ - type NetworkIssueDetector struct{} func (f *NetworkIssueDetector) Build() Message { return nil } -func (f *NetworkIssueDetector) Handle(message Message, messageID uint64, timestamp uint64) Message { +func (f *NetworkIssueDetector) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *NetworkRequest: if msg.Status >= 400 { return &IssueEvent{ Type: "bad_request", - MessageID: messageID, + MessageID: message.MsgID(), Timestamp: msg.Timestamp, ContextString: msg.URL, } diff --git a/backend/pkg/handlers/web/performanceAggregator.go b/backend/pkg/handlers/web/performanceAggregator.go index ba23978b2..babe136a5 100644 --- a/backend/pkg/handlers/web/performanceAggregator.go +++ b/backend/pkg/handlers/web/performanceAggregator.go @@ -7,13 +7,7 @@ import ( "openreplay/backend/pkg/messages/performance" ) -/* - Handler name: PerformanceAggregator - Input event: PerformanceTrack - Output event: PerformanceTrackAggr -*/ - -const AGGREGATION_WINDOW = 2 * 60 * 1000 +const AggregationWindow = 2 * 60 * 1000 type PerformanceAggregator struct { *PerformanceTrackAggr @@ -42,7 +36,7 @@ func (b *PerformanceAggregator) reset() { b.lastTimestamp = 0 } -func (b *PerformanceAggregator) Handle(message Message, _ uint64, timestamp uint64) Message { +func (b *PerformanceAggregator) Handle(message Message, timestamp uint64) Message { switch msg := message.(type) { case *PerformanceTrack: if b.PerformanceTrackAggr == nil || msg.Frames == -1 || msg.Ticks == -1 { @@ -93,7 +87,7 @@ func (b *PerformanceAggregator) Handle(message Message, _ uint64, timestamp uint b.lastTimestamp = timestamp } if b.PerformanceTrackAggr != nil && - timestamp-b.PerformanceTrackAggr.TimestampStart >= AGGREGATION_WINDOW { + timestamp-b.PerformanceTrackAggr.TimestampStart >= AggregationWindow { return b.Build() } return nil diff --git a/backend/pkg/hashid/hashid.go b/backend/pkg/hashid/hashid.go index 25ce11369..5bcb23578 100644 --- a/backend/pkg/hashid/hashid.go +++ b/backend/pkg/hashid/hashid.go @@ -23,3 +23,11 @@ func IOSCrashID(projectID uint32, crash *messages.IOSCrash) string { hash.Write([]byte(crash.Stacktrace)) return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil)) } + +func MouseThrashingID(projectID uint32, sessID, ts uint64) string { + hash := fnv.New128a() + hash.Write([]byte("mouse_trashing")) + hash.Write([]byte(strconv.FormatUint(sessID, 10))) + hash.Write([]byte(strconv.FormatUint(ts, 10))) + return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil)) +} diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index 30e266194..f8997f418 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -2,7 +2,7 @@ package messages func IsReplayerType(id int) bool { - return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 35 != id && 42 != id && 52 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 125 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id + return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 
!= id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 42 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 112 != id && 115 != id && 125 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id } func IsIOSType(id int) bool { @@ -10,5 +10,5 @@ func IsIOSType(id int) bool { } func IsDOMType(id int) bool { - return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 37 == id || 38 == id || 49 == id || 50 == id || 51 == id || 54 == id || 55 == id || 57 == id || 58 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id + return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 37 == id || 38 == id || 49 == id || 50 == id || 51 == id || 54 == id || 55 == id || 57 == id || 58 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 113 == id || 114 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id } diff --git a/backend/pkg/messages/iterator-ender.go b/backend/pkg/messages/iterator-ender.go index 6ca7db034..b822d166e 100644 --- a/backend/pkg/messages/iterator-ender.go +++ b/backend/pkg/messages/iterator-ender.go @@ -1,179 +1,30 @@ package messages -import ( - "fmt" - "log" -) - -type enderMessageIteratorImpl struct { - filter map[int]struct{} - preFilter map[int]struct{} - handler MessageHandler - autoDecode bool - version uint64 - size uint64 - canSkip bool - broken bool - messageInfo *message - batchInfo *BatchInfo - urls *pageLocations +type enderIteratorImpl struct { + coreIterator MessageIterator + handler MessageHandler + lastMessage Message } func NewEnderMessageIterator(messageHandler MessageHandler, messageFilter []int, autoDecode bool) MessageIterator { - iter := &enderMessageIteratorImpl{ - handler: messageHandler, - autoDecode: autoDecode, - urls: NewPageLocations(), + enderIter := &enderIteratorImpl{ + handler: messageHandler, } - if len(messageFilter) != 0 { - filter := make(map[int]struct{}, len(messageFilter)) - for _, msgType := range messageFilter { - filter[msgType] = struct{}{} - } - iter.filter = filter - } - iter.preFilter = map[int]struct{}{ - MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {}, - MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {}, - } - return iter + enderIter.coreIterator = NewMessageIterator(enderIter.handle, messageFilter, autoDecode) + return enderIter } -func (i *enderMessageIteratorImpl) prepareVars(batchInfo *BatchInfo) { - i.batchInfo = batchInfo - i.messageInfo = &message{batch: batchInfo} - i.version = 0 - i.canSkip = false - i.broken = false - i.size = 0 +func (e *enderIteratorImpl) handle(message Message) { + e.lastMessage = message } -func (i *enderMessageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { - // Create new message reader - reader := NewMessageReader(batchData) - - // 
Pre-decode batch data - if err := reader.Parse(); err != nil { - log.Printf("pre-decode batch err: %s, info: %s", err, batchInfo.Info()) - return +func (e *enderIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { + // Reset last message + e.lastMessage = nil + // Call core iterator + e.coreIterator.Iterate(batchData, batchInfo) + // Call handler if last message is not nil + if e.lastMessage != nil { + e.handler(e.lastMessage) } - - // Prepare iterator before processing messages in batch - i.prepareVars(batchInfo) - - // Store last timestamp message here - var lastMessage Message - - for reader.Next() { - // Increase message index (can be overwritten by batch info message) - i.messageInfo.Index++ - - msg := reader.Message() - - // Preprocess "system" messages - if _, ok := i.preFilter[msg.TypeID()]; ok { - msg = msg.Decode() - if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) - return - } - msg = transformDeprecated(msg) - if err := i.preprocessing(msg); err != nil { - log.Printf("message preprocessing err: %s", err) - return - } - } - - // Skip messages we don't have in filter - if i.filter != nil { - if _, ok := i.filter[msg.TypeID()]; !ok { - continue - } - } - - if i.autoDecode { - msg = msg.Decode() - if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) - return - } - } - - // Set meta information for message - msg.Meta().SetMeta(i.messageInfo) - - // Update last timestamp message - lastMessage = msg - } - - if lastMessage != nil { - i.handler(lastMessage) - } - -} - -func (i *enderMessageIteratorImpl) zeroTsLog(msgType string) { - log.Printf("zero timestamp in %s, info: %s", msgType, i.batchInfo.Info()) -} - -func (i *enderMessageIteratorImpl) preprocessing(msg Message) error { - switch m := msg.(type) { - case *BatchMetadata: - if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though - return fmt.Errorf("batchMetadata found at the end of the batch, info: %s", i.batchInfo.Info()) - } - if m.Version > 1 { - return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) - } - i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp - if m.Timestamp == 0 { - i.zeroTsLog("BatchMetadata") - } - i.messageInfo.Url = m.Location - i.version = m.Version - i.batchInfo.version = m.Version - - case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it) - if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though - return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) - } - i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp - if m.Timestamp == 0 { - i.zeroTsLog("BatchMeta") - } - // Try to get saved session's page url - if savedURL := i.urls.Get(i.messageInfo.batch.sessionID); savedURL != "" { - i.messageInfo.Url = savedURL - } - - case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("Timestamp") - } - - case *SessionStart: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("SessionStart") - log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", - m.ProjectID, m.UserAgent, m.TrackerVersion, 
i.batchInfo.Info()) - } - - case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("SessionEnd") - } - // Delete session from urls cache layer - i.urls.Delete(i.messageInfo.batch.sessionID) - - case *SetPageLocation: - i.messageInfo.Url = m.URL - // Save session page url in cache for using in next batches - i.urls.Set(i.messageInfo.batch.sessionID, m.URL) - } - return nil } diff --git a/backend/pkg/messages/iterator-sink.go b/backend/pkg/messages/iterator-sink.go index be12b63eb..de33f5438 100644 --- a/backend/pkg/messages/iterator-sink.go +++ b/backend/pkg/messages/iterator-sink.go @@ -1,181 +1,31 @@ package messages import ( - "fmt" - "log" "openreplay/backend/pkg/metrics/sink" ) -type sinkMessageIteratorImpl struct { - filter map[int]struct{} - preFilter map[int]struct{} - handler MessageHandler - autoDecode bool - version uint64 - size uint64 - canSkip bool - broken bool - messageInfo *message - batchInfo *BatchInfo - urls *pageLocations +type sinkIteratorImpl struct { + coreIterator MessageIterator + handler MessageHandler } func NewSinkMessageIterator(messageHandler MessageHandler, messageFilter []int, autoDecode bool) MessageIterator { - iter := &sinkMessageIteratorImpl{ - handler: messageHandler, - autoDecode: autoDecode, - urls: NewPageLocations(), - } - if len(messageFilter) != 0 { - filter := make(map[int]struct{}, len(messageFilter)) - for _, msgType := range messageFilter { - filter[msgType] = struct{}{} - } - iter.filter = filter - } - iter.preFilter = map[int]struct{}{ - MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {}, - MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {}, + iter := &sinkIteratorImpl{ + handler: messageHandler, } + iter.coreIterator = NewMessageIterator(iter.handle, messageFilter, autoDecode) return iter } -func (i *sinkMessageIteratorImpl) prepareVars(batchInfo *BatchInfo) { - i.batchInfo = batchInfo - i.messageInfo = &message{batch: batchInfo} - i.version = 0 - i.canSkip = false - i.broken = false - i.size = 0 +func (i *sinkIteratorImpl) handle(message Message) { + i.handler(message) } -func (i *sinkMessageIteratorImpl) sendBatchEnd() { - i.handler(nil) -} - -func (i *sinkMessageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { +func (i *sinkIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { sink.RecordBatchSize(float64(len(batchData))) sink.IncreaseTotalBatches() - // Create new message reader - reader := NewMessageReader(batchData) - - // Pre-decode batch data - if err := reader.Parse(); err != nil { - log.Printf("pre-decode batch err: %s, info: %s", err, batchInfo.Info()) - return - } - - // Prepare iterator before processing messages in batch - i.prepareVars(batchInfo) - - for reader.Next() { - // Increase message index (can be overwritten by batch info message) - i.messageInfo.Index++ - - msg := reader.Message() - - // Preprocess "system" messages - if _, ok := i.preFilter[msg.TypeID()]; ok { - msg = msg.Decode() - if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) - break - } - msg = transformDeprecated(msg) - if err := i.preprocessing(msg); err != nil { - log.Printf("message preprocessing err: %s", err) - break - } - } - - // Skip messages we don't have in filter - if i.filter != nil { - if _, ok := i.filter[msg.TypeID()]; !ok { - continue - } - } - - if i.autoDecode { - msg = msg.Decode() - if msg == nil { - log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) - break - } - } - - 
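Both rewritten iterators now decorate the core MessageIterator and differ only in what their handler does per message: the ender variant keeps the last message of a batch, the sink variant forwards everything and signals batch end with a nil. A reduced sketch of the ender-style wrapper (types cut down; the real constructor is NewMessageIterator):

package sketch

type Message interface{}

type MessageHandler func(Message)

type MessageIterator interface {
    Iterate(batchData []byte)
}

// lastOnly reproduces enderIteratorImpl: the core iterator decodes and
// preprocesses the batch, while the wrapper remembers only the final
// message and hands it to the downstream handler once per batch.
type lastOnly struct {
    core    MessageIterator
    handler MessageHandler
    last    Message
}

func newLastOnly(newCore func(MessageHandler) MessageIterator, h MessageHandler) *lastOnly {
    l := &lastOnly{handler: h}
    l.core = newCore(func(m Message) { l.last = m })
    return l
}

func (l *lastOnly) Iterate(batchData []byte) {
    l.last = nil
    l.core.Iterate(batchData)
    if l.last != nil {
        l.handler(l.last)
    }
}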
// Set meta information for message - msg.Meta().SetMeta(i.messageInfo) - - // Process message - i.handler(msg) - } - - // Inform sink about end of batch - i.sendBatchEnd() -} - -func (i *sinkMessageIteratorImpl) zeroTsLog(msgType string) { - log.Printf("zero timestamp in %s, info: %s", msgType, i.batchInfo.Info()) -} - -func (i *sinkMessageIteratorImpl) preprocessing(msg Message) error { - switch m := msg.(type) { - case *BatchMetadata: - if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though - return fmt.Errorf("batchMetadata found at the end of the batch, info: %s", i.batchInfo.Info()) - } - if m.Version > 1 { - return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) - } - i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp - if m.Timestamp == 0 { - i.zeroTsLog("BatchMetadata") - } - i.messageInfo.Url = m.Location - i.version = m.Version - i.batchInfo.version = m.Version - - case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it) - if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though - return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) - } - i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp - if m.Timestamp == 0 { - i.zeroTsLog("BatchMeta") - } - // Try to get saved session's page url - if savedURL := i.urls.Get(i.messageInfo.batch.sessionID); savedURL != "" { - i.messageInfo.Url = savedURL - } - - case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("Timestamp") - } - - case *SessionStart: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("SessionStart") - log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", - m.ProjectID, m.UserAgent, m.TrackerVersion, i.batchInfo.Info()) - } - - case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) - if m.Timestamp == 0 { - i.zeroTsLog("SessionEnd") - } - // Delete session from urls cache layer - i.urls.Delete(i.messageInfo.batch.sessionID) - - case *SetPageLocation: - i.messageInfo.Url = m.URL - // Save session page url in cache for using in next batches - i.urls.Set(i.messageInfo.batch.sessionID, m.URL) - } - return nil + // Call core iterator + i.coreIterator.Iterate(batchData, batchInfo) + // Send batch end signal + i.handler(nil) } diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go index f7b014d30..4a39a7fce 100644 --- a/backend/pkg/messages/iterator.go +++ b/backend/pkg/messages/iterator.go @@ -108,11 +108,20 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { // Set meta information for message msg.Meta().SetMeta(i.messageInfo) + // Update timestamp value for iOS message types + if IsIOSType(msgType) { + msg.Meta().Timestamp = i.getIOSTimestamp(msg) + } + // Process message i.handler(msg) } } +func (i *messageIteratorImpl) getIOSTimestamp(msg Message) uint64 { + return GetTimestamp(msg) +} + func (i *messageIteratorImpl) zeroTsLog(msgType string) { log.Printf("zero timestamp in %s, info: %s", msgType, i.batchInfo.Info()) } @@ -127,7 +136,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("incorrect batch 
version: %d, skip current batch, info: %s", i.version, i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMetadata") } @@ -140,7 +149,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info()) } i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Timestamp = uint64(m.Timestamp) if m.Timestamp == 0 { i.zeroTsLog("BatchMeta") } @@ -150,13 +159,13 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { } case *Timestamp: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("Timestamp") } case *SessionStart: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionStart") log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s", @@ -164,7 +173,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { } case *SessionEnd: - i.messageInfo.Timestamp = int64(m.Timestamp) + i.messageInfo.Timestamp = m.Timestamp if m.Timestamp == 0 { i.zeroTsLog("SessionEnd") } diff --git a/backend/pkg/messages/legacy-message-transform.go b/backend/pkg/messages/legacy-message-transform.go index 223178d15..a267d1f26 100644 --- a/backend/pkg/messages/legacy-message-transform.go +++ b/backend/pkg/messages/legacy-message-transform.go @@ -30,6 +30,19 @@ func transformDeprecated(msg Message) Message { Payload: m.Payload, URL: "", } + case *ResourceTimingDeprecated: + return &ResourceTiming{ + Timestamp: m.Timestamp, + Duration: m.Duration, + TTFB: m.TTFB, + HeaderSize: m.HeaderSize, + EncodedBodySize: m.EncodedBodySize, + DecodedBodySize: m.DecodedBodySize, + URL: m.URL, + Initiator: m.Initiator, + TransferredSize: 0, + Cached: false, + } } return msg } diff --git a/backend/pkg/messages/message.go b/backend/pkg/messages/message.go index 3a8e029d5..6ae02e6c5 100644 --- a/backend/pkg/messages/message.go +++ b/backend/pkg/messages/message.go @@ -8,6 +8,8 @@ type Message interface { TypeID() int Meta() *message SessionID() uint64 + MsgID() uint64 + Time() uint64 } // BatchInfo represents common information for all messages inside data batch @@ -47,7 +49,7 @@ func (b *BatchInfo) Info() string { } type message struct { - Timestamp int64 + Timestamp uint64 Index uint64 Url string batch *BatchInfo @@ -72,6 +74,14 @@ func (m *message) SessionID() uint64 { return m.batch.sessionID } +func (m *message) MsgID() uint64 { + return m.Meta().Index +} + +func (m *message) Time() uint64 { + return m.Meta().Timestamp +} + func (m *message) SetSessionID(sessID uint64) { if m.batch == nil { m.batch = &BatchInfo{} diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index a96f98de8..7a51c6ac9 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -34,8 +34,6 @@ const ( MsgMetadata = 30 MsgPageEvent = 31 MsgInputEvent = 32 - MsgClickEvent = 33 - MsgResourceEvent = 35 MsgCSSInsertRule = 37 MsgCSSDeleteRule = 38 MsgFetch = 39 @@ -50,8 +48,7 @@ const ( MsgPerformanceTrack = 49 MsgStringDict = 50 MsgSetNodeAttributeDict = 51 - MsgDOMDrop = 52 - MsgResourceTiming = 53 + MsgResourceTimingDeprecated 
= 53 MsgConnectionInformation = 54 MsgSetPageVisibility = 55 MsgPerformanceTrackAggr = 56 @@ -79,6 +76,11 @@ const ( MsgBatchMeta = 80 MsgBatchMetadata = 81 MsgPartitionedMessage = 82 + MsgInputChange = 112 + MsgSelectionChange = 113 + MsgMouseThrashing = 114 + MsgUnbindNodes = 115 + MsgResourceTiming = 116 MsgIssueEvent = 125 MsgSessionEnd = 126 MsgSessionSearch = 127 @@ -944,78 +946,6 @@ func (msg *InputEvent) TypeID() int { return 32 } -type ClickEvent struct { - message - MessageID uint64 - Timestamp uint64 - HesitationTime uint64 - Label string - Selector string -} - -func (msg *ClickEvent) Encode() []byte { - buf := make([]byte, 51+len(msg.Label)+len(msg.Selector)) - buf[0] = 33 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.HesitationTime, buf, p) - p = WriteString(msg.Label, buf, p) - p = WriteString(msg.Selector, buf, p) - return buf[:p] -} - -func (msg *ClickEvent) Decode() Message { - return msg -} - -func (msg *ClickEvent) TypeID() int { - return 33 -} - -type ResourceEvent struct { - message - MessageID uint64 - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 - EncodedBodySize uint64 - DecodedBodySize uint64 - URL string - Type string - Success bool - Method string - Status uint64 -} - -func (msg *ResourceEvent) Encode() []byte { - buf := make([]byte, 121+len(msg.URL)+len(msg.Type)+len(msg.Method)) - buf[0] = 35 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Duration, buf, p) - p = WriteUint(msg.TTFB, buf, p) - p = WriteUint(msg.HeaderSize, buf, p) - p = WriteUint(msg.EncodedBodySize, buf, p) - p = WriteUint(msg.DecodedBodySize, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Type, buf, p) - p = WriteBoolean(msg.Success, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteUint(msg.Status, buf, p) - return buf[:p] -} - -func (msg *ResourceEvent) Decode() Message { - return msg -} - -func (msg *ResourceEvent) TypeID() int { - return 35 -} - type CSSInsertRule struct { message ID uint64 @@ -1366,28 +1296,7 @@ func (msg *SetNodeAttributeDict) TypeID() int { return 51 } -type DOMDrop struct { - message - Timestamp uint64 -} - -func (msg *DOMDrop) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 52 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - return buf[:p] -} - -func (msg *DOMDrop) Decode() Message { - return msg -} - -func (msg *DOMDrop) TypeID() int { - return 52 -} - -type ResourceTiming struct { +type ResourceTimingDeprecated struct { message Timestamp uint64 Duration uint64 @@ -1399,7 +1308,7 @@ type ResourceTiming struct { Initiator string } -func (msg *ResourceTiming) Encode() []byte { +func (msg *ResourceTimingDeprecated) Encode() []byte { buf := make([]byte, 81+len(msg.URL)+len(msg.Initiator)) buf[0] = 53 p := 1 @@ -1414,11 +1323,11 @@ func (msg *ResourceTiming) Encode() []byte { return buf[:p] } -func (msg *ResourceTiming) Decode() Message { +func (msg *ResourceTimingDeprecated) Decode() Message { return msg } -func (msg *ResourceTiming) TypeID() int { +func (msg *ResourceTimingDeprecated) TypeID() int { return 53 } @@ -2117,6 +2026,143 @@ func (msg *PartitionedMessage) TypeID() int { return 82 } +type InputChange struct { + message + ID uint64 + Value string + ValueMasked bool + Label string + HesitationTime int64 + InputDuration int64 +} + +func (msg *InputChange) Encode() []byte { + buf := make([]byte, 61+len(msg.Value)+len(msg.Label)) + buf[0] = 112 + p := 1 + p = WriteUint(msg.ID, buf, p) + p = 
WriteString(msg.Value, buf, p) + p = WriteBoolean(msg.ValueMasked, buf, p) + p = WriteString(msg.Label, buf, p) + p = WriteInt(msg.HesitationTime, buf, p) + p = WriteInt(msg.InputDuration, buf, p) + return buf[:p] +} + +func (msg *InputChange) Decode() Message { + return msg +} + +func (msg *InputChange) TypeID() int { + return 112 +} + +type SelectionChange struct { + message + SelectionStart uint64 + SelectionEnd uint64 + Selection string +} + +func (msg *SelectionChange) Encode() []byte { + buf := make([]byte, 31+len(msg.Selection)) + buf[0] = 113 + p := 1 + p = WriteUint(msg.SelectionStart, buf, p) + p = WriteUint(msg.SelectionEnd, buf, p) + p = WriteString(msg.Selection, buf, p) + return buf[:p] +} + +func (msg *SelectionChange) Decode() Message { + return msg +} + +func (msg *SelectionChange) TypeID() int { + return 113 +} + +type MouseThrashing struct { + message + Timestamp uint64 +} + +func (msg *MouseThrashing) Encode() []byte { + buf := make([]byte, 11) + buf[0] = 114 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + return buf[:p] +} + +func (msg *MouseThrashing) Decode() Message { + return msg +} + +func (msg *MouseThrashing) TypeID() int { + return 114 +} + +type UnbindNodes struct { + message + TotalRemovedPercent uint64 +} + +func (msg *UnbindNodes) Encode() []byte { + buf := make([]byte, 11) + buf[0] = 115 + p := 1 + p = WriteUint(msg.TotalRemovedPercent, buf, p) + return buf[:p] +} + +func (msg *UnbindNodes) Decode() Message { + return msg +} + +func (msg *UnbindNodes) TypeID() int { + return 115 +} + +type ResourceTiming struct { + message + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 + EncodedBodySize uint64 + DecodedBodySize uint64 + URL string + Initiator string + TransferredSize uint64 + Cached bool +} + +func (msg *ResourceTiming) Encode() []byte { + buf := make([]byte, 101+len(msg.URL)+len(msg.Initiator)) + buf[0] = 116 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + p = WriteUint(msg.TTFB, buf, p) + p = WriteUint(msg.HeaderSize, buf, p) + p = WriteUint(msg.EncodedBodySize, buf, p) + p = WriteUint(msg.DecodedBodySize, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Initiator, buf, p) + p = WriteUint(msg.TransferredSize, buf, p) + p = WriteBoolean(msg.Cached, buf, p) + return buf[:p] +} + +func (msg *ResourceTiming) Decode() Message { + return msg +} + +func (msg *ResourceTiming) TypeID() int { + return 116 +} + type IssueEvent struct { message MessageID uint64 diff --git a/backend/pkg/messages/primitives.go b/backend/pkg/messages/primitives.go index 3e47a3943..921239287 100644 --- a/backend/pkg/messages/primitives.go +++ b/backend/pkg/messages/primitives.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "io" + "math" ) var ( @@ -50,6 +51,15 @@ func WriteUint(v uint64, buf []byte, p int) int { return p + 1 } +func ByteSizeUint(v uint64) int { + if v == 0 { + return 1 + } + nBits := math.Floor(math.Log2(float64(v))) + 1 + nBytes := math.Ceil(nBits / 7) + return int(nBytes) +} + func ReadInt(reader io.Reader) (int64, error) { ux, err := ReadUint(reader) x := int64(ux >> 1) diff --git a/backend/pkg/messages/raw.go b/backend/pkg/messages/raw.go index 44f666c69..ae8b97365 100644 --- a/backend/pkg/messages/raw.go +++ b/backend/pkg/messages/raw.go @@ -42,3 +42,17 @@ func (m *RawMessage) SessionID() uint64 { } return 0 } + +func (m *RawMessage) MsgID() uint64 { + if m.meta != nil { + return m.meta.Index + } + return 0 +} + +func (m *RawMessage) Time() uint64 { + if m.meta != nil { + return 
m.meta.Timestamp + } + return 0 +} diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index ecc00183f..a51200dc0 100644 --- a/backend/pkg/messages/read-message.go +++ b/backend/pkg/messages/read-message.go @@ -546,69 +546,6 @@ func DecodeInputEvent(reader BytesReader) (Message, error) { return msg, err } -func DecodeClickEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &ClickEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.HesitationTime, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Label, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Selector, err = reader.ReadString(); err != nil { - return nil, err - } - return msg, err -} - -func DecodeResourceEvent(reader BytesReader) (Message, error) { - var err error = nil - msg := &ResourceEvent{} - if msg.MessageID, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.Duration, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.TTFB, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.HeaderSize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { - return nil, err - } - if msg.URL, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Type, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Success, err = reader.ReadBoolean(); err != nil { - return nil, err - } - if msg.Method, err = reader.ReadString(); err != nil { - return nil, err - } - if msg.Status, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err -} - func DecodeCSSInsertRule(reader BytesReader) (Message, error) { var err error = nil msg := &CSSInsertRule{} @@ -819,18 +756,9 @@ func DecodeSetNodeAttributeDict(reader BytesReader) (Message, error) { return msg, err } -func DecodeDOMDrop(reader BytesReader) (Message, error) { +func DecodeResourceTimingDeprecated(reader BytesReader) (Message, error) { var err error = nil - msg := &DOMDrop{} - if msg.Timestamp, err = reader.ReadUint(); err != nil { - return nil, err - } - return msg, err -} - -func DecodeResourceTiming(reader BytesReader) (Message, error) { - var err error = nil - msg := &ResourceTiming{} + msg := &ResourceTimingDeprecated{} if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } @@ -1293,6 +1221,99 @@ func DecodePartitionedMessage(reader BytesReader) (Message, error) { return msg, err } +func DecodeInputChange(reader BytesReader) (Message, error) { + var err error = nil + msg := &InputChange{} + if msg.ID, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Value, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.ValueMasked, err = reader.ReadBoolean(); err != nil { + return nil, err + } + if msg.Label, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.HesitationTime, err = reader.ReadInt(); err != nil { + return nil, err + } + if msg.InputDuration, err = reader.ReadInt(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeSelectionChange(reader BytesReader) (Message, error) { + var err error = nil + msg := &SelectionChange{} + if 
msg.SelectionStart, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.SelectionEnd, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.Selection, err = reader.ReadString(); err != nil {
+		return nil, err
+	}
+	return msg, err
+}
+
+func DecodeMouseThrashing(reader BytesReader) (Message, error) {
+	var err error = nil
+	msg := &MouseThrashing{}
+	if msg.Timestamp, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	return msg, err
+}
+
+func DecodeUnbindNodes(reader BytesReader) (Message, error) {
+	var err error = nil
+	msg := &UnbindNodes{}
+	if msg.TotalRemovedPercent, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	return msg, err
+}
+
+func DecodeResourceTiming(reader BytesReader) (Message, error) {
+	var err error = nil
+	msg := &ResourceTiming{}
+	if msg.Timestamp, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.Duration, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.TTFB, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.HeaderSize, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.EncodedBodySize, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.DecodedBodySize, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.URL, err = reader.ReadString(); err != nil {
+		return nil, err
+	}
+	if msg.Initiator, err = reader.ReadString(); err != nil {
+		return nil, err
+	}
+	if msg.TransferredSize, err = reader.ReadUint(); err != nil {
+		return nil, err
+	}
+	if msg.Cached, err = reader.ReadBoolean(); err != nil {
+		return nil, err
+	}
+	return msg, err
+}
+
 func DecodeIssueEvent(reader BytesReader) (Message, error) {
 	var err error = nil
 	msg := &IssueEvent{}
@@ -1812,10 +1833,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
 		return DecodePageEvent(reader)
 	case 32:
 		return DecodeInputEvent(reader)
-	case 33:
-		return DecodeClickEvent(reader)
-	case 35:
-		return DecodeResourceEvent(reader)
 	case 37:
 		return DecodeCSSInsertRule(reader)
 	case 38:
@@ -1844,10 +1861,8 @@
 		return DecodeStringDict(reader)
 	case 51:
 		return DecodeSetNodeAttributeDict(reader)
-	case 52:
-		return DecodeDOMDrop(reader)
 	case 53:
-		return DecodeResourceTiming(reader)
+		return DecodeResourceTimingDeprecated(reader)
 	case 54:
 		return DecodeConnectionInformation(reader)
 	case 55:
@@ -1902,6 +1917,16 @@
 		return DecodeBatchMetadata(reader)
 	case 82:
 		return DecodePartitionedMessage(reader)
+	case 112:
+		return DecodeInputChange(reader)
+	case 113:
+		return DecodeSelectionChange(reader)
+	case 114:
+		return DecodeMouseThrashing(reader)
+	case 115:
+		return DecodeUnbindNodes(reader)
+	case 116:
+		return DecodeResourceTiming(reader)
 	case 125:
 		return DecodeIssueEvent(reader)
 	case 126:
diff --git a/backend/pkg/messages/reader.go b/backend/pkg/messages/reader.go
index 1e9fa42db..436876e55 100644
--- a/backend/pkg/messages/reader.go
+++ b/backend/pkg/messages/reader.go
@@ -70,13 +70,15 @@ func (m *messageReaderImpl) Parse() (err error) {
 	}
 
 	// Dirty hack to avoid extra memory allocation
-	m.data[curr-1] = uint8(m.msgType)
+	mTypeByteSize := ByteSizeUint(m.msgType)
+	from := int(curr) - mTypeByteSize
+	WriteUint(m.msgType, m.data, from)
 
 	// Add message meta to list
 	m.list = append(m.list, &MessageMeta{
 		msgType: m.msgType,
 		msgSize: m.msgSize + 1,
-		msgFrom: uint64(curr - 1),
+		msgFrom: uint64(from),
 	})
 
 	// Update data pointer
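
The reader.go hunk above replaces the old one-byte write-back of the message type with a width-aware rewrite built on the new ByteSizeUint helper from primitives.go. The reason: the codec packs 7 payload bits per varint byte, so a type ID only fits in the single byte the old "dirty hack" assumed while it stays below 128. A minimal, self-contained sketch of the width rule — byteSizeUint is a local mirror of ByteSizeUint, renamed so the example compiles on its own:

package main

import (
	"fmt"
	"math"
)

// byteSizeUint mirrors ByteSizeUint from primitives.go: a varint stores
// 7 payload bits per byte, so the encoded width is ceil(bitlen(v) / 7),
// with a minimum of one byte for v == 0.
func byteSizeUint(v uint64) int {
	if v == 0 {
		return 1
	}
	nBits := math.Floor(math.Log2(float64(v))) + 1
	return int(math.Ceil(nBits / 7))
}

func main() {
	// The new IDs 112-116 still fit in one varint byte, but anything
	// from 128 upward needs two -- the case the in-place rewrite in
	// reader.go now handles instead of truncating to uint8.
	for _, id := range []uint64{53, 116, 127, 128, 300} {
		fmt.Printf("type %3d -> %d varint byte(s)\n", id, byteSizeUint(id))
	}
}

Running it shows 53 and 116 still need one byte while 128 tips over to two, which is why msgFrom is now computed from the real width rather than curr - 1.
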
diff --git a/backend/pkg/metrics/heuristics/metrics.go b/backend/pkg/metrics/heuristics/metrics.go
new file mode 100644
index 000000000..61a84dc49
--- /dev/null
+++ b/backend/pkg/metrics/heuristics/metrics.go
@@ -0,0 +1,22 @@
+package heuristics
+
+import "github.com/prometheus/client_golang/prometheus"
+
+var heuristicsTotalEvents = prometheus.NewCounterVec(
+	prometheus.CounterOpts{
+		Namespace: "heuristics",
+		Name:      "events_total",
+		Help:      "A counter displaying the number of all processed events",
+	},
+	[]string{"type"},
+)
+
+func IncreaseTotalEvents(eventType string) {
+	heuristicsTotalEvents.WithLabelValues(eventType).Inc()
+}
+
+func List() []prometheus.Collector {
+	return []prometheus.Collector{
+		heuristicsTotalEvents,
+	}
+}
diff --git a/backend/pkg/metrics/storage/metrics.go b/backend/pkg/metrics/storage/metrics.go
index 26459c90d..2579d7e7c 100644
--- a/backend/pkg/metrics/storage/metrics.go
+++ b/backend/pkg/metrics/storage/metrics.go
@@ -31,6 +31,32 @@ func IncreaseStorageTotalSessions() {
 	storageTotalSessions.Inc()
 }
 
+var storageSkippedSessionSize = prometheus.NewHistogramVec(
+	prometheus.HistogramOpts{
+		Namespace: "storage",
+		Name:      "skipped_session_size_bytes",
+		Help:      "A histogram displaying the size of each skipped session file in bytes.",
+		Buckets:   common.DefaultSizeBuckets,
+	},
+	[]string{"file_type"},
+)
+
+func RecordSkippedSessionSize(fileSize float64, fileType string) {
+	storageSkippedSessionSize.WithLabelValues(fileType).Observe(fileSize)
+}
+
+var storageTotalSkippedSessions = prometheus.NewCounter(
+	prometheus.CounterOpts{
+		Namespace: "storage",
+		Name:      "sessions_skipped_total",
+		Help:      "A counter displaying the total number of sessions skipped because of the size limits.",
+	},
+)
+
+func IncreaseStorageTotalSkippedSessions() {
+	storageTotalSkippedSessions.Inc()
+}
+
 var storageSessionReadDuration = prometheus.NewHistogramVec(
 	prometheus.HistogramOpts{
 		Namespace: "storage",
diff --git a/backend/pkg/sessions/builder.go b/backend/pkg/sessions/builder.go
index d21fd890a..683e4ccb5 100644
--- a/backend/pkg/sessions/builder.go
+++ b/backend/pkg/sessions/builder.go
@@ -10,7 +10,7 @@ import (
 
 type builder struct {
 	sessionID      uint64
-	readyMsgs      []Message
+	readyMsgs      chan Message
 	timestamp      uint64
 	lastMessageID  uint64
 	lastSystemTime time.Time
@@ -18,20 +18,14 @@ type builder struct {
 	ended bool
 }
 
-func NewBuilder(sessionID uint64, handlers ...handlers.MessageProcessor) *builder {
+func NewBuilder(sessionID uint64, events chan Message, handlers ...handlers.MessageProcessor) *builder {
 	return &builder{
 		sessionID:  sessionID,
 		processors: handlers,
+		readyMsgs:  events,
 	}
 }
 
-func (b *builder) iterateReadyMessages(iter func(msg Message)) {
-	for _, readyMsg := range b.readyMsgs {
-		iter(readyMsg)
-	}
-	b.readyMsgs = nil
-}
-
 func (b *builder) checkSessionEnd(message Message) {
 	if _, isEnd := message.(*IOSSessionEnd); isEnd {
 		b.ended = true
@@ -41,34 +35,31 @@
 	}
 }
 
-func (b *builder) handleMessage(message Message, messageID uint64) {
-	if messageID < b.lastMessageID {
+func (b *builder) handleMessage(m Message) {
+	if m.MsgID() < b.lastMessageID {
 		// May happen in case of duplicated messages in kafka (if `idempotence: false`)
-		log.Printf("skip message with wrong msgID, sessID: %d, msgID: %d, lastID: %d", b.sessionID, messageID, b.lastMessageID)
+		log.Printf("skip message with wrong msgID, sessID: %d, msgID: %d, lastID: %d", b.sessionID, m.MsgID(), b.lastMessageID)
 		return
 	}
-	timestamp := GetTimestamp(message)
-	if timestamp == 0 {
-		switch message.(type) {
+	if m.Time() <= 0 {
+		switch m.(type) {
 		case *IssueEvent, *PerformanceTrackAggr:
 			break
 		default:
-			log.Printf("skip message with empty timestamp, sessID: %d, msgID: %d, msgType: %d", b.sessionID, messageID, message.TypeID())
+			log.Printf("skip message with incorrect timestamp, sessID: %d, msgID: %d, msgType: %d", b.sessionID, m.MsgID(), m.TypeID())
 		}
 		return
 	}
-	if timestamp < b.timestamp {
-		//log.Printf("skip message with wrong timestamp, sessID: %d, msgID: %d, type: %d, msgTS: %d, lastTS: %d", b.sessionID, messageID, message.TypeID(), timestamp, b.timestamp)
-	} else {
-		b.timestamp = timestamp
+	if m.Time() > b.timestamp {
+		b.timestamp = m.Time()
 	}
-	b.lastSystemTime = time.Now()
+
+	// Process current message
 	for _, p := range b.processors {
-		if rm := p.Handle(message, messageID, b.timestamp); rm != nil {
-			rm.Meta().SetMeta(message.Meta())
-			b.readyMsgs = append(b.readyMsgs, rm)
+		if rm := p.Handle(m, b.timestamp); rm != nil {
+			rm.Meta().SetMeta(m.Meta())
+			b.readyMsgs <- rm
 		}
 	}
-	b.checkSessionEnd(message)
+	b.checkSessionEnd(m)
 }
diff --git a/backend/pkg/sessions/builderMap.go b/backend/pkg/sessions/builderMap.go
index 85e787929..9e66ce260 100644
--- a/backend/pkg/sessions/builderMap.go
+++ b/backend/pkg/sessions/builderMap.go
@@ -2,92 +2,98 @@ package sessions
 
 import (
 	"log"
-	"openreplay/backend/pkg/handlers"
+	"sync"
 	"time"
 
+	"openreplay/backend/pkg/handlers"
 	. "openreplay/backend/pkg/messages"
 )
 
-const FORCE_DELETE_TIMEOUT = 4 * time.Hour
+const ForceDeleteTimeout = 30 * time.Minute
 
 type builderMap struct {
 	handlersFabric func() []handlers.MessageProcessor
 	sessions       map[uint64]*builder
+	mutex          *sync.Mutex
+	events         chan Message
+	done           chan struct{}
 }
 
-func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) *builderMap {
-	return &builderMap{
+type EventBuilder interface {
+	Events() chan Message
+	HandleMessage(msg Message)
+	Stop()
+}
+
+func NewBuilderMap(handlersFabric func() []handlers.MessageProcessor) EventBuilder {
+	b := &builderMap{
 		handlersFabric: handlersFabric,
 		sessions:       make(map[uint64]*builder),
+		mutex:          &sync.Mutex{},
+		events:         make(chan Message, 1024*10),
+		done:           make(chan struct{}),
 	}
-}
-
-func (m *builderMap) GetBuilder(sessionID uint64) *builder {
-	b := m.sessions[sessionID]
-	if b == nil {
-		b = NewBuilder(sessionID, m.handlersFabric()...) // Should create new instances
-		m.sessions[sessionID] = b
-	}
+	go b.worker()
 	return b
 }
 
-func (m *builderMap) HandleMessage(msg Message) {
-	sessionID := msg.SessionID()
-	messageID := msg.Meta().Index
-	b := m.GetBuilder(sessionID)
-	b.handleMessage(msg, messageID)
+func (m *builderMap) getBuilder(sessionID uint64) *builder {
+	m.mutex.Lock()
+	b := m.sessions[sessionID]
+	if b == nil {
+		b = NewBuilder(sessionID, m.events, m.handlersFabric()...)
+		m.sessions[sessionID] = b
+	}
+	m.mutex.Unlock()
+	return b
 }
 
-func (m *builderMap) ClearOldSessions() {
+func (m *builderMap) Events() chan Message {
+	return m.events
+}
+
+func (m *builderMap) HandleMessage(msg Message) {
+	m.getBuilder(msg.SessionID()).handleMessage(msg)
+}
+
+func (m *builderMap) worker() {
+	tick := time.Tick(10 * time.Second)
+	for {
+		select {
+		case <-tick:
+			m.checkSessions()
+		case <-m.done:
+			return
+		}
+	}
+}
+
+func (m *builderMap) checkSessions() {
+	m.mutex.Lock()
 	deleted := 0
 	now := time.Now()
-	for id, sess := range m.sessions {
-		if sess.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(now) {
-			// Should delete zombie session
-			delete(m.sessions, id)
+	for sessID, b := range m.sessions {
+		// Check session's events
+		if b.ended || b.lastSystemTime.Add(ForceDeleteTimeout).Before(now) {
+			// Build rest of messages
+			for _, p := range b.processors {
+				if rm := p.Build(); rm != nil {
+					rm.Meta().SetSessionID(sessID)
+					m.events <- rm
+				}
+			}
+			delete(m.sessions, sessID)
 			deleted++
 		}
 	}
+	m.mutex.Unlock()
 	if deleted > 0 {
 		log.Printf("deleted %d sessions from message builder", deleted)
 	}
 }
 
-func (m *builderMap) iterateSessionReadyMessages(sessionID uint64, b *builder, iter func(msg Message)) {
-	if b.ended || b.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(time.Now()) {
-		for _, p := range b.processors {
-			if rm := p.Build(); rm != nil {
-				rm.Meta().SetSessionID(sessionID)
-				b.readyMsgs = append(b.readyMsgs, rm)
-			}
-		}
-	}
-	b.iterateReadyMessages(iter)
-	if b.ended {
-		delete(m.sessions, sessionID)
-	}
-}
-
-func (m *builderMap) IterateReadyMessages(iter func(sessionID uint64, msg Message)) {
-	for sessionID, session := range m.sessions {
-		m.iterateSessionReadyMessages(
-			sessionID,
-			session,
-			func(msg Message) {
-				iter(sessionID, msg)
-			},
-		)
-	}
-}
-
-func (m *builderMap) IterateSessionReadyMessages(sessionID uint64, iter func(msg Message)) {
-	session, ok := m.sessions[sessionID]
-	if !ok {
-		return
-	}
-	m.iterateSessionReadyMessages(
-		sessionID,
-		session,
-		iter,
-	)
+func (m *builderMap) Stop() {
+	m.done <- struct{}{}
+	m.checkSessions()
+	close(m.events)
 }
diff --git a/backend/pkg/terminator/terminator.go b/backend/pkg/terminator/terminator.go
new file mode 100644
index 000000000..29e106aa1
--- /dev/null
+++ b/backend/pkg/terminator/terminator.go
@@ -0,0 +1,22 @@
+package terminator
+
+import (
+	"log"
+	"os"
+	"os/signal"
+	"syscall"
+)
+
+// ServiceStopper is a common interface for all services
+type ServiceStopper interface {
+	Stop()
+}
+
+func Wait(s ServiceStopper) {
+	sigChan := make(chan os.Signal, 1)
+	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
+	sig := <-sigChan
+	log.Printf("Caught signal %v: terminating\n", sig)
+	s.Stop()
+	os.Exit(0)
+}
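
terminator.go above gives every backend service a single, uniform shutdown hook, and the reworked builderMap now pushes ready events through a buffered channel instead of exposing iterate callbacks. A sketch of how a consumer service might combine the two — stopper/wait and fakeBuilder here are local stand-ins for terminator.ServiceStopper/terminator.Wait and the new EventBuilder interface, not code from this change:

package main

import (
	"fmt"
	"log"
	"os"
	"os/signal"
	"syscall"
)

// stopper and wait mirror terminator.ServiceStopper and terminator.Wait
// (the real Wait also calls os.Exit(0) once Stop() returns).
type stopper interface{ Stop() }

func wait(s stopper) {
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
	sig := <-sigChan
	log.Printf("caught signal %v: terminating", sig)
	s.Stop()
}

// fakeBuilder stands in for the channel-based builderMap: a background
// worker pushes ready messages into Events(), and Stop() closes the
// channel after a final flush.
type fakeBuilder struct{ events chan string }

func (b *fakeBuilder) Events() chan string { return b.events }
func (b *fakeBuilder) Stop()               { close(b.events) }

func main() {
	b := &fakeBuilder{events: make(chan string, 16)}
	go func() {
		// Consumers now just drain the channel until Stop() closes it.
		for e := range b.Events() {
			fmt.Println("ready event:", e)
		}
	}()
	b.events <- "PageEvent"
	wait(b) // blocks until SIGINT/SIGTERM, then shuts the pipeline down
}

The push model decouples event production from consumption: the 10-second worker tick flushes ended or stale sessions into the same channel, so the consumer needs no knowledge of session lifetimes.
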
diff --git a/backend/pkg/url/url.go b/backend/pkg/url/url.go
index 654e803eb..30b3f2a7e 100644
--- a/backend/pkg/url/url.go
+++ b/backend/pkg/url/url.go
@@ -1,7 +1,7 @@
 package url
 
 import (
-	_url "net/url"
+	"net/url"
 	"strings"
 )
 
@@ -11,7 +11,7 @@ func DiscardURLQuery(url string) string {
 
 func GetURLParts(rawURL string) (string, string, string, error) {
 	rawURL = strings.Replace(rawURL, "\t", "", -1) // Other chars?
-	u, err := _url.Parse(rawURL)
+	u, err := url.Parse(rawURL)
 	if err != nil {
 		return "", "", "", err
 	}
@@ -22,3 +22,34 @@ func GetURLParts(rawURL string) (string, string, string, error) {
 	}
 	return u.Host, path, u.RawQuery, nil
 }
+
+func getURLExtension(URL string) string {
+	u, err := url.Parse(URL)
+	if err != nil {
+		return ""
+	}
+	i := strings.LastIndex(u.Path, ".")
+	return u.Path[i+1:]
+}
+
+func GetResourceType(initiator string, URL string) string {
+	switch initiator {
+	case "xmlhttprequest", "fetch":
+		return "fetch"
+	case "img":
+		return "img"
+	default:
+		switch getURLExtension(URL) {
+		case "css":
+			return "stylesheet"
+		case "js":
+			return "script"
+		case "png", "gif", "jpg", "jpeg", "svg":
+			return "img"
+		case "mp4", "mkv", "ogg", "webm", "avi", "mp3":
+			return "media"
+		default:
+			return "other"
+		}
+	}
+}
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index 79aec2ade..1e342e8bc 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -215,6 +215,7 @@ Pipfile.lock
 /chalicelib/core/log_tool_sumologic.py
 /chalicelib/core/metadata.py
 /chalicelib/core/mobile.py
+/chalicelib/core/sessions.py
 /chalicelib/core/sessions_assignments.py #exp
 /chalicelib/core/sessions_metas.py
 /chalicelib/core/sessions_mobs.py
@@ -264,5 +265,8 @@ Pipfile.lock
 /app_alerts.py
 /build_alerts.sh
 /build_crons.sh
+/run-dev.sh
+/run-alerts-dev.sh
+/routers/subs/health.py
 /routers/subs/v1_api.py #exp
 /chalicelib/core/dashboards.py
diff --git a/ee/api/app.py b/ee/api/app.py
index a1e203005..034d93565 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -1,5 +1,6 @@
 import logging
 import queue
+from contextlib import asynccontextmanager
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from decouple import config
@@ -10,17 +11,54 @@ from starlette import status
 from starlette.responses import StreamingResponse, JSONResponse
 
 from chalicelib.core import traces
+from chalicelib.utils import events_queue
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
-from chalicelib.utils import events_queue
 from routers import core, core_dynamic, ee, saml
 from routers.crons import core_crons
 from routers.crons import core_dynamic_crons
 from routers.crons import ee_crons
 from routers.subs import insights, metrics, v1_api_ee
-from routers.subs import v1_api
+from routers.subs import v1_api, health
 
-app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
+loglevel = config("LOGLEVEL", default=logging.INFO)
+print(f">Loglevel set to: {loglevel}")
+logging.basicConfig(level=loglevel)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Startup
+    logging.info(">>>>> starting up <<<<<")
+    ap_logger = logging.getLogger('apscheduler')
+    ap_logger.setLevel(loglevel)
+
+    app.schedule = AsyncIOScheduler()
+    app.queue_system = queue.Queue()
+    await pg_client.init()
+    await events_queue.init()
+    app.schedule.start()
+
+    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs:
+        app.schedule.add_job(id=job["func"].__name__, **job)
+
+    ap_logger.info(">Scheduled jobs:")
+    for job in app.schedule.get_jobs():
+        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
+
+    # App listening
+    yield
+
+    # Shutdown
+    logging.info(">>>>> shutting down <<<<<")
+    app.schedule.shutdown(wait=True)
+    await traces.process_traces_queue()
+    await events_queue.terminate()
+    await pg_client.terminate()
+
+
+app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""),
+              lifespan=lifespan)
 
 app.add_middleware(GZipMiddleware, minimum_size=1000)
 
@@ -68,43 +106,6 @@ app.include_router(metrics.app)
 app.include_router(insights.app)
 app.include_router(v1_api.app_apikey)
 app.include_router(v1_api_ee.app_apikey)
-
-loglevel = config("LOGLEVEL", default=logging.INFO)
-print(f">Loglevel set to: {loglevel}")
-logging.basicConfig(level=loglevel)
-ap_logger = logging.getLogger('apscheduler')
-ap_logger.setLevel(loglevel)
-app.schedule = AsyncIOScheduler()
-app.queue_system = queue.Queue()
-
-
-@app.on_event("startup")
-async def startup():
-    logging.info(">>>>> starting up <<<<<")
-    await pg_client.init()
-    await events_queue.init()
-    app.schedule.start()
-
-    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs:
-        app.schedule.add_job(id=job["func"].__name__, **job)
-
-    ap_logger.info(">Scheduled jobs:")
-    for job in app.schedule.get_jobs():
-        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
-
-
-@app.on_event("shutdown")
-async def shutdown():
-    logging.info(">>>>> shutting down <<<<<")
-    app.schedule.shutdown(wait=True)
-    await traces.process_traces_queue()
-    await events_queue.terminate()
-    await pg_client.terminate()
-
-
-@app.get('/private/shutdown', tags=["private"])
-async def stop_server():
-    logging.info("Requested shutdown")
-    await shutdown()
-    import os, signal
-    os.kill(1, signal.SIGTERM)
+app.include_router(health.public_app)
+app.include_router(health.app)
+app.include_router(health.app_apikey)
diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py
index b7502edd9..96d4b424f 100644
--- a/ee/api/chalicelib/core/custom_metrics.py
+++ b/ee/api/chalicelib/core/custom_metrics.py
@@ -2,8 +2,7 @@ import json
 from typing import Union
 
 from decouple import config
-from fastapi import HTTPException
-from starlette import status
+from fastapi import HTTPException, status
 
 import schemas
 import schemas_ee
diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py
new file mode 100644
index 000000000..26b3a2b24
--- /dev/null
+++ b/ee/api/chalicelib/core/health.py
@@ -0,0 +1,228 @@
+from urllib.parse import urlparse
+
+import redis
+import requests
+# from confluent_kafka.admin import AdminClient
+from decouple import config
+
+from chalicelib.utils import pg_client, ch_client
+
+HEALTH_ENDPOINTS = {
+    "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/health",
+    "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics",
+    "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health",
+    "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics",
+    "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics",
+    "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics",
+    "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics",
+    "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics",
+    "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics",
+    "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics",
+    "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health",
+    "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics",
+    "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics",
+    "sourcemaps-reader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health",
"http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", +} + + +def __check_database_pg(): + fail_response = { + "health": False, + "details": { + "errors": ["Postgres health-check failed"] + } + } + with pg_client.PostgresClient() as cur: + try: + cur.execute("SHOW server_version;") + server_version = cur.fetchone() + except Exception as e: + print("!! health failed: postgres not responding") + print(str(e)) + return fail_response + try: + cur.execute("SELECT openreplay_version() AS version;") + schema_version = cur.fetchone() + except Exception as e: + print("!! health failed: openreplay_version not defined") + print(str(e)) + return fail_response + return { + "health": True, + "details": { + # "version": server_version["server_version"], + # "schema": schema_version["version"] + } + } + + +def __not_supported(): + return {"errors": ["not supported"]} + + +def __always_healthy(): + return { + "health": True, + "details": {} + } + + +def __check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! issue with the {service_name}-health code:{results.status_code}") + print(results.text) + # fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!! Timeout getting {service_name}-health") + # fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print(f"!! Issue getting {service_name}-health response") + print(str(e)) + try: + print(results.text) + # fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + # fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def __check_redis(): + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("REDIS_STRING", default=None) is None: + # fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars") + return fail_response + + try: + u = urlparse(config("REDIS_STRING")) + r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) + r.ping() + except Exception as e: + print("!! 
+        print(str(e))
+        # fail_response["details"]["errors"].append(str(e))
+        return fail_response
+
+    return {
+        "health": True,
+        "details": {
+            # "version": r.execute_command('INFO')['redis_version']
+        }
+    }
+
+
+def get_health():
+    health_map = {
+        "databases": {
+            "postgres": __check_database_pg,
+            "clickhouse": __check_database_ch
+        },
+        "ingestionPipeline": {
+            "redis": __check_redis,
+            # "kafka": __check_kafka
+            "kafka": __always_healthy
+        },
+        "backendServices": {
+            "alerts": __check_be_service("alerts"),
+            "assets": __check_be_service("assets"),
+            "assist": __check_be_service("assist"),
+            "chalice": __always_healthy,
+            "db": __check_be_service("db"),
+            "ender": __check_be_service("ender"),
+            "frontend": __always_healthy,
+            "heuristics": __check_be_service("heuristics"),
+            "http": __check_be_service("http"),
+            "ingress-nginx": __always_healthy,
+            "integrations": __check_be_service("integrations"),
+            "peers": __check_be_service("peers"),
+            # "quickwit": __check_be_service("quickwit"),
+            "sink": __check_be_service("sink"),
+            "sourcemaps-reader": __check_be_service("sourcemaps-reader"),
+            "storage": __check_be_service("storage")
+        }
+    }
+    for parent_key in health_map.keys():
+        for element_key in health_map[parent_key]:
+            health_map[parent_key][element_key] = health_map[parent_key][element_key]()
+    return health_map
+
+
+def __check_database_ch():
+    fail_response = {
+        "health": False,
+        "details": {"errors": ["server health-check failed"]}
+    }
+    with ch_client.ClickHouseClient() as ch:
+        try:
+            server_version = ch.execute("SELECT version() AS server_version;")
+        except Exception as e:
+            print("!! health failed: clickhouse not responding")
+            print(str(e))
+            return fail_response
+
+        schema_version = ch.execute("""SELECT 1
+                                       FROM system.functions
+                                       WHERE name = 'openreplay_version';""")
+        if len(schema_version) > 0:
+            schema_version = ch.execute("SELECT openreplay_version() AS version;")
+            schema_version = schema_version[0]["version"]
+        else:
+            print("!! health failed: clickhouse schema is outdated")
+            schema_version = "unknown"
+            # fail_response["details"]["errors"].append("clickhouse schema is outdated")
+            return fail_response
+    return {
+        "health": True,
+        "details": {
+            # "version": server_version[0]["server_version"],
+            # "schema": schema_version,
+            # **errors
+        }
+    }
+
+# def __check_kafka():
+#     fail_response = {
+#         "health": False,
+#         "details": {"errors": ["server health-check failed"]}
+#     }
+#     if config("KAFKA_SERVERS", default=None) is None:
+#         fail_response["details"]["errors"].append("KAFKA_SERVERS not defined in env-vars")
+#         return fail_response
+#
+#     try:
+#         a = AdminClient({'bootstrap.servers': config("KAFKA_SERVERS"), "socket.connection.setup.timeout.ms": 3000})
+#         topics = a.list_topics().topics
+#         if not topics:
+#             raise Exception('topics not found')
+#
+#     except Exception as e:
+#         print("!! 
Issue getting kafka-health response") +# print(str(e)) +# fail_response["details"]["errors"].append(str(e)) +# return fail_response +# +# return { +# "health": True, +# "details": {} +# } diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index dc06703ce..467f4185e 100644 --- a/ee/api/chalicelib/core/projects.py +++ b/ee/api/chalicelib/core/projects.py @@ -1,8 +1,7 @@ import json from typing import Optional -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status import schemas from chalicelib.core import users @@ -55,6 +54,7 @@ def __create(tenant_id, name): def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, user_id=None): + stack_integrations = False with pg_client.PostgresClient() as cur: role_query = """INNER JOIN LATERAL (SELECT 1 FROM users diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py index 79f1caec7..077bb2464 100644 --- a/ee/api/chalicelib/core/roles.py +++ b/ee/api/chalicelib/core/roles.py @@ -1,7 +1,6 @@ from typing import Optional -from fastapi import HTTPException -from starlette import status +from fastapi import HTTPException, status import schemas_ee from chalicelib.core import users, projects diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py deleted file mode 100644 index 6d92c3954..000000000 --- a/ee/api/chalicelib/core/sessions.py +++ /dev/null @@ -1,1215 +0,0 @@ -from typing import List - -import schemas -import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper -from chalicelib.utils import pg_client, helper, metrics_helper -from chalicelib.utils import sql_helper as sh - -SESSION_PROJECTION_COLS = """s.project_id, -s.session_id::text AS session_id, -s.user_uuid, -s.user_id, -s.user_os, -s.user_browser, -s.user_device, -s.user_device_type, -s.user_country, -s.start_ts, -s.duration, -s.events_count, -s.pages_count, -s.errors_count, -s.user_anonymous_id, -s.platform, -s.issue_score, -to_jsonb(s.issue_types) AS issue_types, -favorite_sessions.session_id NOTNULL AS favorite, -COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ - - -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, - include_fav_viewed=False, group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS 
file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - -# This function executes the query and return result -def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): - if data.bookmarked: - data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) - - full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, - favorite_only=data.bookmarked, issue=issue, project_id=project_id, - user_id=user_id) - if data.limit is not None and data.page is not None: - full_args["sessions_limit"] = data.limit - full_args["sessions_limit_s"] = (data.page - 1) * data.limit - 
full_args["sessions_limit_e"] = data.page * data.limit - else: - full_args["sessions_limit"] = 200 - full_args["sessions_limit_s"] = 1 - full_args["sessions_limit_e"] = 200 - - meta_keys = [] - with pg_client.PostgresClient() as cur: - if errors_only: - main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE er.error_id = ve.error_id - AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - {query_part};""", full_args) - - elif count_only: - main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions, - COUNT(DISTINCT s.user_uuid) AS count_users - {query_part};""", full_args) - elif data.group_by_user: - g_sort = "count(full_sessions)" - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - if data.sort is not None and data.sort != 'sessionsCount': - sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" - else: - sort = 'start_ts' - - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, - COALESCE(JSONB_AGG(users_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT user_id, - count(full_sessions) AS user_sessions_count, - jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, - MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ) AS filtred_sessions - ) AS full_sessions - GROUP BY user_id - ) AS users_sessions;""", - full_args) - elif ids_only: - main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id - {query_part} - ORDER BY s.session_id desc - LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", - full_args) - else: - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if errors_only or ids_only: - return 
helper.list_to_camel_case(cur.fetchall()) - - sessions = cur.fetchone() - if count_only: - return helper.dict_to_camel_case(sessions) - - total = sessions["count"] - sessions = sessions["sessions"] - - if data.group_by_user: - for i, s in enumerate(sessions): - sessions[i] = {**s.pop("last_session")[0], **s} - sessions[i].pop("rn") - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - else: - for i, s in enumerate(sessions): - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - # if not data.group_by_user and data.sort is not None and data.sort != "session_id": - # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], - # reverse=data.order.upper() == "DESC") - return { - 'total': total, - 'sessions': helper.list_to_camel_case(sessions) - } - - -def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, - view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, - metric_of: schemas.MetricOfTable, metric_value: List): - step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, - density=density, factor=1, decimal=True)) - extra_event = None - if metric_of == schemas.MetricOfTable.visited_url: - extra_event = "events.pages" - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) - full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) - full_args["step_size"] = step_size - sessions = [] - with pg_client.PostgresClient() as cur: - if metric_type == schemas.MetricType.timeseries: - if view_type == schemas.MetricTimeseriesViewType.line_chart: - main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts - {query_part}) - SELECT generated_timestamp AS timestamp, - COUNT(s) AS count - FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT 1 AS s - FROM full_sessions - WHERE start_ts >= generated_timestamp - AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""", full_args) - else: - main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count - {query_part};""", full_args) - - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if view_type == schemas.MetricTimeseriesViewType.line_chart: - sessions = cur.fetchall() - else: - sessions = cur.fetchone()["count"] - elif metric_type == schemas.MetricType.table: - if isinstance(metric_of, schemas.MetricOfTable): - main_col = "user_id" - extra_col = "" - extra_where = "" - pre_query = "" - distinct_on = "s.session_id" - if metric_of == schemas.MetricOfTable.user_country: - main_col = "user_country" - elif metric_of == 
schemas.MetricOfTable.user_device: - main_col = "user_device" - elif metric_of == schemas.MetricOfTable.user_browser: - main_col = "user_browser" - elif metric_of == schemas.MetricOfTable.issues: - main_col = "issue" - extra_col = f", UNNEST(s.issue_types) AS {main_col}" - if len(metric_value) > 0: - extra_where = [] - for i in range(len(metric_value)): - arg_name = f"selected_issue_{i}" - extra_where.append(f"{main_col} = %({arg_name})s") - full_args[arg_name] = metric_value[i] - extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: - main_col = "path" - extra_col = ", path" - distinct_on += ",path" - main_query = cur.mogrify(f"""{pre_query} - SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values - FROM (SELECT {main_col} AS name, - count(DISTINCT session_id) AS session_count, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT * - FROM (SELECT DISTINCT ON({distinct_on}) s.session_id, s.user_uuid, - s.user_id, s.user_os, - s.user_browser, s.user_device, - s.user_device_type, s.user_country, s.issue_types{extra_col} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ) AS full_sessions - {extra_where} - GROUP BY {main_col} - ORDER BY session_count DESC) AS users_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - cur.execute(main_query) - sessions = cur.fetchone() - for s in sessions["values"]: - s.pop("rn") - sessions["values"] = helper.list_to_camel_case(sessions["values"]) - - return sessions - - -def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): - return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, - schemas.EventType.graphql] \ - or event.type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ] and (event.source is None or len(event.source) == 0) \ - or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( - event.filters is None or len(event.filters) == 0)) - - -# this function generates the query and return the generated-query with the dict of query arguments -def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, - project_id, user_id, extra_event=None): - ss_constraints = [] - full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, - "projectId": project_id, "userId": user_id} - extra_constraints = [ - "s.project_id = %(project_id)s", - "s.duration IS NOT NULL" - ] - extra_from = "" - events_query_part = "" - if len(data.filters) > 0: - meta_keys = None - for i, f in enumerate(data.filters): - if not isinstance(f.value, list): - f.value = [f.value] - filter_type = f.type - f.value = helper.values_for_operator(value=f.value, op=f.operator) - f_k = f"f_value{i}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)} - op = sh.get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator - is_any = sh.isAny_opreator(f.operator) - is_undefined = sh.isUndefined_operator(f.operator) - if not is_any and not is_undefined and len(f.value) == 0: - continue - is_not = False - if 
sh.is_negation_operator(f.operator): - is_not = True - if filter_type == schemas.FilterType.user_browser: - if is_any: - extra_constraints.append('s.user_browser IS NOT NULL') - ss_constraints.append('ms.user_browser IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]: - if is_any: - extra_constraints.append('s.user_os IS NOT NULL') - ss_constraints.append('ms.user_os IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]: - if is_any: - extra_constraints.append('s.user_device IS NOT NULL') - ss_constraints.append('ms.user_device IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]: - if is_any: - extra_constraints.append('s.user_country IS NOT NULL') - ss_constraints.append('ms.user_country IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.utm_source]: - if is_any: - extra_constraints.append('s.utm_source IS NOT NULL') - ss_constraints.append('ms.utm_source IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_source IS NULL') - ss_constraints.append('ms.utm_source IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: - if is_any: - extra_constraints.append('s.utm_medium IS NOT NULL') - ss_constraints.append('ms.utm_medium IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_medium IS NULL') - ss_constraints.append('ms.utm_medium IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: - if is_any: - extra_constraints.append('s.utm_campaign IS NOT NULL') - ss_constraints.append('ms.utm_campaign IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_campaign IS NULL') - ss_constraints.append('ms.utm_campaign IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, 
is_not=is_not, - value_key=f_k)) - - elif filter_type == schemas.FilterType.duration: - if len(f.value) > 0 and f.value[0] is not None: - extra_constraints.append("s.duration >= %(minDuration)s") - ss_constraints.append("ms.duration >= %(minDuration)s") - full_args["minDuration"] = f.value[0] - if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: - extra_constraints.append("s.duration <= %(maxDuration)s") - ss_constraints.append("ms.duration <= %(maxDuration)s") - full_args["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: - # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - if is_any: - extra_constraints.append('s.base_referrer IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == events.EventType.METADATA.ui_type: - # get metadata list only if you need it - if meta_keys is None: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - if f.source in meta_keys.keys(): - if is_any: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - elif is_undefined: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - else: - extra_constraints.append( - sh.multi_conditions( - f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions( - f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - if is_any: - extra_constraints.append('s.user_id IS NOT NULL') - ss_constraints.append('ms.user_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_id IS NULL') - ss_constraints.append('ms.user_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_ios]: - if is_any: - extra_constraints.append('s.user_anonymous_id IS NOT NULL') - ss_constraints.append('ms.user_anonymous_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_anonymous_id IS NULL') - ss_constraints.append('ms.user_anonymous_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: - if is_any: - extra_constraints.append('s.rev_id IS NOT NULL') - ss_constraints.append('ms.rev_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.rev_id IS NULL') - ss_constraints.append('ms.rev_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - 
ss_constraints.append( - sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.platform: - # op = __ sh.get_sql_operator(f.operator) - extra_constraints.append( - sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.issue: - if is_any: - extra_constraints.append("array_length(s.issue_types, 1) > 0") - ss_constraints.append("array_length(ms.issue_types, 1) > 0") - else: - extra_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - # search sessions with click_rage on a specific selector - if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value: - for j, sf in enumerate(f.filters): - if sf.operator == schemas.IssueFilterOperator._on_selector: - f_k = f"f_value{i}_{j}" - full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)} - extra_constraints += ["mc.timestamp>=%(startDate)s", - "mc.timestamp<=%(endDate)s", - "mis.type='click_rage'", - sh.multi_conditions(f"mc.selector=%({f_k})s", - sf.value, is_not=is_not, - value_key=f_k)] - - extra_from += """INNER JOIN events.clicks AS mc USING(session_id) - INNER JOIN events_common.issues USING (session_id,timestamp) - INNER JOIN public.issues AS mis USING (issue_id)\n""" - - elif filter_type == schemas.FilterType.events_count: - extra_constraints.append( - sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - # --------------------------------------------------------------------------- - if len(data.events) > 0: - valid_events_count = 0 - for event in data.events: - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if __is_valid_event(is_any=is_any, event=event): - valid_events_count += 1 - events_query_from = [] - event_index = 0 - or_events = data.events_order == schemas.SearchEventOrder._or - # events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL " - events_joiner = " UNION " if or_events else " INNER JOIN LATERAL " - for i, event in enumerate(data.events): - event_type = event.type - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if not __is_valid_event(is_any=is_any, event=event): - continue - op = sh.get_sql_operator(event.operator) - is_not = False - if sh.is_negation_operator(event.operator): - is_not = True - op = sh.reverse_sql_operator(op) - if event_index == 0 or or_events: - event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)" - event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", - "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", - "ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"] - if favorite_only and not errors_only: - event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)" - event_where.append("fs.user_id = %(userId)s") - else: - event_from = "%s" - event_where = ["main.timestamp >= %(startDate)s", "main.timestamp 
<= %(endDate)s", - "main.session_id=event_0.session_id"] - if data.events_order == schemas.SearchEventOrder._then: - event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp") - e_k = f"e_value{i}" - s_k = e_k + "_source" - if event.type != schemas.PerformanceEventType.time_between_events: - event.value = helper.values_for_operator(value=event.value, op=event.operator) - full_args = {**full_args, - **sh.multi_values(event.value, value_key=e_k), - **sh.multi_values(event.source, value_key=s_k)} - - if event_type == events.EventType.CLICK.ui_type: - event_from = event_from % f"{events.EventType.CLICK.table} AS main " - if not is_any: - if event.operator == schemas.ClickEventExtraOperator._on_selector: - event_where.append( - sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) - else: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value, - value_key=e_k)) - - elif event_type == events.EventType.INPUT.ui_type: - event_from = event_from % f"{events.EventType.INPUT.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value, - value_key=e_k)) - if event.source is not None and len(event.source) > 0: - event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key=f"custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")} - - elif event_type == events.EventType.LOCATION.ui_type: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.CUSTOM.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value, - value_key=e_k)) - elif event_type == events.EventType.REQUEST.ui_type: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, - value_key=e_k)) - # elif event_type == events.event_type.GRAPHQL.ui_type: - # event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, - # value_key=e_k)) - elif event_type == events.EventType.STATEACTION.ui_type: - event_from = event_from % f"{events.EventType.STATEACTION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR.ui_type: - event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" - event.source = list(set(event.source)) - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)", - event.value, value_key=e_k)) - if len(event.source) > 0 and event.source[0] not in [None, "*", ""]: - event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k)) - - - # ----- IOS - elif event_type == 
events.EventType.CLICK_IOS.ui_type: - event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - - elif event_type == events.EventType.INPUT_IOS.ui_type: - event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - if event.source is not None and len(event.source) > 0: - event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key="custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")} - elif event_type == events.EventType.VIEW_IOS.ui_type: - event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.CUSTOM_IOS.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.REQUEST_IOS.ui_type: - event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR_IOS.ui_type: - event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", - event.value, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.fetch_failed: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", - event.value, value_key=e_k)) - col = performance_event.get_col(event_type) - colname = col["column"] - event_where.append(f"main.{colname} = FALSE") - # elif event_type == schemas.PerformanceEventType.fetch_duration: - # event_from = event_from % f"{events.event_type.REQUEST.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", - # event.value, value_key=e_k)) - # col = performance_event.get_col(event_type) - # colname = col["column"] - # tname = "main" - # e_k += "_custom" - # full_args = {**full_args, **_ sh.multiple_values(event.source, value_key=e_k)} - # event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", - # event.source, value_key=e_k)) - elif event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ]: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - col = 
performance_event.get_col(event_type) - colname = col["column"] - tname = "main" - if col.get("extraJoin") is not None: - tname = "ej" - event_from += f" INNER JOIN {col['extraJoin']} AS {tname} USING(session_id)" - event_where += [f"{tname}.timestamp >= main.timestamp", f"{tname}.timestamp >= %(startDate)s", - f"{tname}.timestamp <= %(endDate)s"] - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - - event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " - if not isinstance(event.value[0].value, list): - event.value[0].value = [event.value[0].value] - if not isinstance(event.value[1].value, list): - event.value[1].value = [event.value[1].value] - event.value[0].value = helper.values_for_operator(value=event.value[0].value, - op=event.value[0].operator) - event.value[1].value = helper.values_for_operator(value=event.value[1].value, - op=event.value[0].operator) - e_k1 = e_k + "_e1" - e_k2 = e_k + "_e2" - full_args = {**full_args, - **sh.multi_values(event.value[0].value, value_key=e_k1), - **sh.multi_values(event.value[1].value, value_key=e_k2)} - s_op = sh.get_sql_operator(event.value[0].operator) - event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"] - if event_index > 0 and not or_events: - event_where.append("main2.session_id=event_0.session_id") - is_any = sh.isAny_opreator(event.value[0].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", - event.value[0].value, value_key=e_k1)) - s_op = sh.get_sql_operator(event.value[1].operator) - is_any = sh.isAny_opreator(event.value[1].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", - event.value[1].value, value_key=e_k2)) - - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - - elif event_type == schemas.EventType.request_details: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - apply = False - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_fetch{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.FetchFilterType._url: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", - f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._status_code: - event_where.append( - sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value, - 
value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._duration: - event_where.append( - sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - else: - print(f"undefined FETCH filter: {f.type}") - if not apply: - continue - elif event_type == schemas.EventType.graphql: - event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_graphql{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.GraphqlFilterType._name: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, - value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - else: - print(f"undefined GRAPHQL filter: {f.type}") - else: - continue - if event_index == 0 or or_events: - event_where += ss_constraints - if is_not: - if event_index == 0 or or_events: - events_query_from.append(f"""\ - (SELECT - session_id, - 0 AS timestamp - FROM sessions - WHERE EXISTS(SELECT session_id - FROM {event_from} - WHERE {" AND ".join(event_where)} - AND sessions.session_id=ms.session_id) IS FALSE - AND project_id = %(projectId)s - AND start_ts >= %(startDate)s - AND start_ts <= %(endDate)s - AND duration IS NOT NULL - ) {"" if or_events else (f"AS event_{event_index}" + ("ON(TRUE)" if event_index > 0 else ""))}\ - """) - else: - events_query_from.append(f"""\ - (SELECT - event_0.session_id, - event_{event_index - 1}.timestamp AS timestamp - WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE - ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ - """) - else: - events_query_from.append(f"""\ - (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp - FROM {event_from} - WHERE {" AND ".join(event_where)} - GROUP BY 1 - ) {"" if or_events else (f"AS event_{event_index} " + ("ON(TRUE)" if event_index > 0 else ""))}\ - """) - event_index += 1 - if event_index > 0: - if or_events: - events_query_part = f"""SELECT - session_id, - MIN(timestamp) AS first_event_ts, - MAX(timestamp) AS last_event_ts - FROM ({events_joiner.join(events_query_from)}) AS u - GROUP BY 1""" - else: - events_query_part = f"""SELECT - 
event_0.session_id, - MIN(event_0.timestamp) AS first_event_ts, - MAX(event_{event_index - 1}.timestamp) AS last_event_ts - FROM {events_joiner.join(events_query_from)} - GROUP BY 1""" - else: - data.events = [] - # --------------------------------------------------------------------------- - if data.startDate is not None: - extra_constraints.append("s.start_ts >= %(startDate)s") - if data.endDate is not None: - extra_constraints.append("s.start_ts <= %(endDate)s") - # if data.platform is not None: - # if data.platform == schemas.PlatformType.mobile: - # extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')") - # elif data.platform == schemas.PlatformType.desktop: - # extra_constraints.append( - # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - - if errors_only: - extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" - extra_constraints.append("ser.source = 'js_exception'") - extra_constraints.append("ser.project_id = %(project_id)s") - # if error_status != schemas.ErrorStatus.all: - # extra_constraints.append("ser.status = %(error_status)s") - # full_args["error_status"] = error_status - # if favorite_only: - # extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - # extra_constraints.append("ufe.user_id = %(userId)s") - - if favorite_only and not errors_only and user_id is not None: - extra_from += """INNER JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - elif not favorite_only and not errors_only and user_id is not None: - extra_from += """LEFT JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - extra_join = "" - if issue is not None: - extra_join = """ - INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id) - WHERE issues.session_id=f.session_id - AND p_issues.type=%(issue_type)s - AND p_issues.context_string=%(issue_contextString)s - AND timestamp >= f.first_event_ts - AND timestamp <= f.last_event_ts) AS issues ON(TRUE) - """ - full_args["issue_contextString"] = issue["contextString"] - full_args["issue_type"] = issue["type"] - if extra_event: - extra_join += f"""INNER JOIN {extra_event} AS ev USING(session_id)""" - extra_constraints.append("ev.timestamp>=%(startDate)s") - extra_constraints.append("ev.timestamp<=%(endDate)s") - query_part = f"""\ - FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"} - {extra_join} - {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""} - {extra_from} - WHERE - {" AND ".join(extra_constraints)}""" - return full_args, query_part - - -def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): - if project_id is None: - all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - else: - all_projects = [ - projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, - include_gdpr=False)] - - all_projects = {int(p["projectId"]): p["name"] for p in all_projects} - project_ids = list(all_projects.keys()) - - available_keys = metadata.get_keys_by_projects(project_ids) - for i in available_keys: - available_keys[i]["user_id"] = schemas.FilterType.user_id - 
available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id - results = {} - for i in project_ids: - if m_key not in available_keys[i].values(): - available_keys.pop(i) - results[i] = {"total": 0, "sessions": [], "missingMetadata": True} - project_ids = list(available_keys.keys()) - if len(project_ids) > 0: - with pg_client.PostgresClient() as cur: - sub_queries = [] - for i in project_ids: - col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] - sub_queries.append(cur.mogrify( - f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", - {"id": i, "value": m_value}).decode('UTF-8')) - query = f"""SELECT {", ".join(sub_queries)};""" - cur.execute(query=query) - - rows = cur.fetchone() - - sub_queries = [] - for i in rows.keys(): - results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]} - if rows[i] > 0: - col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)] - sub_queries.append( - cur.mogrify( - f"""( - SELECT * - FROM ( - SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s LEFT JOIN (SELECT session_id - FROM public.user_favorite_sessions - WHERE user_favorite_sessions.user_id = %(userId)s - ) AS favorite_sessions USING (session_id) - WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s - ) AS full_sessions - ORDER BY favorite DESC, issue_score DESC - LIMIT 10 - )""", - {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) - if len(sub_queries) > 0: - cur.execute("\nUNION\n".join(sub_queries)) - rows = cur.fetchall() - for i in rows: - results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) - return results - - -def get_user_sessions(project_id, user_id, start_date, end_date): - with pg_client.PostgresClient() as cur: - constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] - if start_date is not None: - constraints.append("s.start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("s.start_ts <= %(endDate)s") - - query_part = f"""\ - FROM public.sessions AS s - WHERE {" AND ".join(constraints)}""" - - cur.execute(cur.mogrify(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count - {query_part} - ORDER BY s.session_id - LIMIT 50;""", { - "projectId": project_id, - "userId": user_id, - "startDate": start_date, - "endDate": end_date - })) - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - -def get_session_user(project_id, user_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT - user_id, - count(*) as session_count, - max(start_ts) as last_seen, - min(start_ts) as first_seen - FROM - "public".sessions - WHERE - project_id = %(project_id)s - AND user_id = %(userId)s - AND duration is not null - GROUP BY user_id; - """, - {"project_id": project_id, "userId": user_id} - ) - cur.execute(query=query) - data = cur.fetchone() - return helper.dict_to_camel_case(data) - - -def get_session_ids_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT session_id FROM public.sessions - WHERE - project_id = 
%(project_id)s AND user_id IN %(userId)s;""",
-            {"project_id": project_id, "userId": tuple(user_ids)}
-        )
-        ids = cur.execute(query=query)
-        return ids
-
-
-def delete_sessions_by_session_ids(session_ids):
-    with pg_client.PostgresClient(unlimited_query=True) as cur:
-        query = cur.mogrify(
-            """\
-            DELETE FROM public.sessions
-            WHERE
-                session_id IN %(session_ids)s;""",
-            {"session_ids": tuple(session_ids)}
-        )
-        cur.execute(query=query)
-
-    return True
-
-
-def delete_sessions_by_user_ids(project_id, user_ids):
-    with pg_client.PostgresClient(unlimited_query=True) as cur:
-        query = cur.mogrify(
-            """\
-            DELETE FROM public.sessions
-            WHERE
-                project_id = %(project_id)s AND user_id IN %(userId)s;""",
-            {"project_id": project_id, "userId": tuple(user_ids)}
-        )
-        cur.execute(query=query)
-
-    return True
-
-
-def count_all():
-    with pg_client.PostgresClient(unlimited_query=True) as cur:
-        cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
-        row = cur.fetchone()
-    return row.get("count", 0) if row else 0
-
-
-def session_exists(project_id, session_id):
-    with pg_client.PostgresClient() as cur:
-        query = cur.mogrify("""SELECT 1
-                               FROM public.sessions
-                               WHERE session_id=%(session_id)s
-                                 AND project_id=%(project_id)s
-                               LIMIT 1;""",
-                            {"project_id": project_id, "session_id": session_id})
-        cur.execute(query)
-        row = cur.fetchone()
-    return row is not None
diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py
index f60090ed4..888800681 100644
--- a/ee/api/chalicelib/core/sessions_exp.py
+++ b/ee/api/chalicelib/core/sessions_exp.py
@@ -2,11 +2,8 @@ from typing import List, Union
 
 import schemas
 import schemas_ee
-from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, projects, resources, assist, performance_event, metrics, sessions_devtool, \
-    sessions_notes
-from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper, errors_helper
-from chalicelib.utils import sql_helper as sh
+from chalicelib.core import events, metadata, projects, performance_event, metrics
+from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
 
 SESSION_PROJECTION_COLS_CH = """\
 s.project_id,
@@ -51,94 +48,6 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
 """
 
 
-def __group_metadata(session, project_metadata):
-    meta = {}
-    for m in project_metadata.keys():
-        if project_metadata[m] is not None and session.get(m) is not None:
-            meta[project_metadata[m]] = session[m]
-        session.pop(m)
-    return meta
-
-
-# This function should not use Clickhouse because it doesn't have `file_key`
-def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False,
-                  group_metadata=False, live=True):
-    with pg_client.PostgresClient() as cur:
-        extra_query = []
-        if include_fav_viewed:
-            extra_query.append("""COALESCE((SELECT TRUE
-                                            FROM public.user_favorite_sessions AS fs
-                                            WHERE s.session_id = fs.session_id
-                                              AND fs.user_id = %(userId)s), FALSE) AS favorite""")
-            extra_query.append("""COALESCE((SELECT TRUE
-                                            FROM public.user_viewed_sessions AS fs
-                                            WHERE s.session_id = fs.session_id
-                                              AND fs.user_id = %(userId)s), FALSE) AS viewed""")
-        query = cur.mogrify(
-            f"""\
-            SELECT
-                s.*,
-                s.session_id::text AS session_id,
-                (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key,
-                encode(file_key,'hex') AS file_key
-                {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
-                {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
-            FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
-            WHERE s.project_id = %(project_id)s
-                AND s.session_id = %(session_id)s;""",
-            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
-        )
-        # print("===============")
-        # print(query)
-        cur.execute(query=query)
-
-        data = cur.fetchone()
-        if data is not None:
-            data = helper.dict_to_camel_case(data)
-            if full_data:
-                if data["platform"] == 'ios':
-                    data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
-                    for e in data['events']:
-                        if e["type"].endswith("_IOS"):
-                            e["type"] = e["type"][:-len("_IOS")]
-                    data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
-                    data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
-                                                                             session_id=session_id)
-                    data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
-                else:
-                    data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
-                                                              group_clickrage=True)
-                    all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
-                    data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
-                    # to keep only the first stack
-                    # limit the number of errors to reduce the response-body size
-                    data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
-                                      if e['source'] == "js_exception"][:500]
-                    data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
-                                                                          session_id=session_id)
-                    data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
-                    data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
-                    data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
-                                                                    context=context)
-                    data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
-                                                                    start_ts=data["startTs"],
-                                                                    duration=data["duration"])
-
-                data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
-                                                                 session_id=session_id, user_id=context.user_id)
-                data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
-                data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
-                data['live'] = live and assist.is_live(project_id=project_id,
-                                                       session_id=session_id,
-                                                       project_key=data["projectKey"])
-                data["inDB"] = True
-            return data
-        elif live:
-            return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
-        else:
-            return None
-
-
 def __get_sql_operator(op: schemas.SearchEventOperator):
     return {
         schemas.SearchEventOperator._is: "=",
diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py
index d8ae4e1f7..85e308756 100644
--- a/ee/api/chalicelib/core/sessions_favorite.py
+++ b/ee/api/chalicelib/core/sessions_favorite.py
@@ -10,13 +10,15 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session
         cur.execute(
             cur.mogrify(f"""\
                 INSERT INTO public.user_favorite_sessions(user_id, session_id)
-                VALUES (%(userId)s,%(sessionId)s);""",
-                        {"userId": context.user_id, "sessionId": session_id})
+                VALUES (%(userId)s,%(session_id)s)
+                RETURNING session_id;""",
+                        {"userId": context.user_id, "session_id": session_id})
         )
-
-    sessions_favorite_exp.add_favorite_session(project_id=project_id,
-                                               user_id=context.user_id, session_id=session_id)
-    return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id,
-                                  full_data=False, include_fav_viewed=True, context=context)
+        row = cur.fetchone()
+    if row:
+        sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
+        return {"data": {"sessionId": session_id}}
+    return {"errors": ["something went wrong"]}
 
 
 def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
@@ -25,12 +27,15 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess
             cur.mogrify(f"""\
                 DELETE FROM public.user_favorite_sessions
                 WHERE user_id = %(userId)s
-                  AND session_id = %(sessionId)s;""",
-                        {"userId": context.user_id, "sessionId": session_id})
+                  AND session_id = %(session_id)s
+                RETURNING session_id;""",
+                        {"userId": context.user_id, "session_id": session_id})
         )
-    sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
-    return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id,
-                                  full_data=False, include_fav_viewed=True, context=context)
+        row = cur.fetchone()
+    if row:
+        sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
+        return {"data": {"sessionId": session_id}}
+    return {"errors": ["something went wrong"]}
 
 
 def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
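The rewritten favorite endpoints stop round-tripping through `get_by_id2_pg` and instead use `RETURNING` to confirm that the row was actually written or deleted before mirroring the change into the experimental store. A minimal sketch of the same pattern outside the OpenReplay helpers; the DSN, connection handling, and unqualified table name are placeholders:

```python
import psycopg2


def remove_favorite(dsn: str, user_id: int, session_id: int) -> dict:
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        # RETURNING makes the write observable: fetchone() yields None when
        # the DELETE matched no row, so the caller can report failure instead
        # of unconditionally answering with a success payload.
        cur.execute(
            """DELETE FROM user_favorite_sessions
               WHERE user_id = %(user_id)s
                 AND session_id = %(session_id)s
               RETURNING session_id;""",
            {"user_id": user_id, "session_id": session_id})
        row = cur.fetchone()
    if row:
        return {"data": {"sessionId": session_id}}
    return {"errors": ["something went wrong"]}
```

Returning the slim `{"data": {"sessionId": ...}}` payload also spares the full-session query that the old code paid on every favorite toggle.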
%(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +# This function should not use Clickhouse because it doesn't have `file_key` +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN 
public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT session_id, platform, start_ts, duration + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=s_data["startTs"], duration=s_data["duration"]) + + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index d01e1379a..1344b5e7b 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -11,7 +11,7 @@ from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -def create_step1(data: schemas.UserSignupSchema): +def create_tenant(data: schemas.UserSignupSchema): print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC") errors = [] if not config("MULTI_TENANTS", cast=bool, 
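The new module splits the monolithic `get_by_id2_pg` into `get_replay` (the session record plus mob/devtools URLs, enough to boot the player) and `get_events` (events, stack events, errors, resources, and issues). The router changes later in this diff expose them as `/replay` and `/events`, so a client can stage the load instead of paying for everything in one call. A hypothetical consumer; the base URL and token are placeholders:

```python
import requests

BASE = "https://openreplay.example.com/api"   # placeholder
HEADERS = {"Authorization": "Bearer <jwt>"}   # placeholder


def load_session(project_id: int, session_id: int) -> dict:
    # 1) light call: replay metadata and file URLs, enough to start playback
    replay = requests.get(f"{BASE}/{project_id}/sessions/{session_id}/replay",
                          headers=HEADERS).json()["data"]
    # 2) heavier call: events, stack events, errors, resources and issues
    events = requests.get(f"{BASE}/{project_id}/sessions/{session_id}/events",
                          headers=HEADERS).json()["data"]
    return {**replay, **events}
```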
diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py
index d01e1379a..1344b5e7b 100644
--- a/ee/api/chalicelib/core/signup.py
+++ b/ee/api/chalicelib/core/signup.py
@@ -11,7 +11,7 @@ from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
 
 
-def create_step1(data: schemas.UserSignupSchema):
+def create_tenant(data: schemas.UserSignupSchema):
     print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC")
     errors = []
     if not config("MULTI_TENANTS", cast=bool, default=False) and tenants.tenants_exists():
diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py
index 30a87bd29..7ea621007 100644
--- a/ee/api/chalicelib/core/tenants.py
+++ b/ee/api/chalicelib/core/tenants.py
@@ -51,7 +51,7 @@ def get_by_api_key(api_key):
                             WHERE tenants.api_key = %(api_key)s
                                 AND tenants.deleted_at ISNULL
                             LIMIT 1;""",
-                            {"api_key": api_key})
+                        {"api_key": api_key})
         cur.execute(query=query)
         return helper.dict_to_camel_case(cur.fetchone())
 
@@ -94,7 +94,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema):
     return edit_client(tenant_id=tenant_id, changes=changes)
 
 
-def tenants_exists():
-    with pg_client.PostgresClient() as cur:
+def tenants_exists(use_pool=True):
+    with pg_client.PostgresClient(use_pool=use_pool) as cur:
         cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)")
         return cur.fetchone()["exists"]
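`tenants_exists` gains a `use_pool` flag, presumably because the router further down in this diff calls it at import time, before the request-scoped connection pool is necessarily available (that is the reading suggested by the `tenants_exists(use_pool=False)` call site in `routers/core_dynamic.py`). A self-contained sketch of the resulting register-the-route-only-if-signup-is-open pattern, with stand-ins for the config flag and the tenant check:

```python
from fastapi import FastAPI

app = FastAPI()

MULTI_TENANTS = False      # stand-in for config("MULTI_TENANTS", cast=bool, default=False)


def tenants_exist() -> bool:
    return False           # stand-in for tenants.tenants_exists(use_pool=False)


# Evaluated once at import: the /signup route simply does not exist
# on instances where a tenant has already been created.
if MULTI_TENANTS or not tenants_exist():
    @app.post('/signup', tags=['signup'])
    async def signup_handler() -> dict:
        return {"data": "tenant created"}   # stand-in for signup.create_tenant(data)
```

One consequence of import-time registration is that a single-tenant instance must be restarted for the route to disappear after the first tenant signs up.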
diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py
index d1e70d3e7..55405cc0d 100644
--- a/ee/api/chalicelib/core/webhook.py
+++ b/ee/api/chalicelib/core/webhook.py
@@ -2,8 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from fastapi import HTTPException
-from starlette import status
+from fastapi import HTTPException, status
 
 import schemas
 from chalicelib.utils import pg_client, helper
@@ -113,7 +112,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
 
 
 def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int],
-                    webhook_type: str = schemas.WebhookType.webhook) -> bool:
+                   webhook_type: str = schemas.WebhookType.webhook) -> bool:
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
                                               FROM public.webhooks
diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py
index d182226c0..061b329ef 100644
--- a/ee/api/chalicelib/utils/assist_helper.py
+++ b/ee/api/chalicelib/utils/assist_helper.py
@@ -37,13 +37,16 @@ def get_full_config():
     if __get_secret() is not None:
         for i in range(len(servers)):
             url = servers[i].split(",")[0]
-            servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials}
+            # servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials}
+            servers[i] = {"urls": url} if url.lower().startswith("stun") else {"urls": url, **credentials}
     else:
         for i in range(len(servers)):
             s = servers[i].split(",")
             if len(s) == 3:
-                servers[i] = {"url": s[0], "username": s[1], "credential": s[2]}
+                # servers[i] = {"url": s[0], "username": s[1], "credential": s[2]}
+                servers[i] = {"urls": s[0], "username": s[1], "credential": s[2]}
             else:
-                servers[i] = {"url": s[0]}
+                # servers[i] = {"url": s[0]}
+                servers[i] = {"urls": s[0]}
     return servers
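The `assist_helper` change swaps the non-standard `url` key for `urls`, which is the field the WebRTC `RTCIceServer` dictionary actually defines (it accepts a single URI string or a list of them). A small sketch of the corrected shape; the server entries and credentials are hypothetical:

```python
def build_ice_servers(raw: list) -> list:
    servers = []
    for entry in raw:
        parts = entry.split(",")
        if len(parts) == 3:
            # TURN entry carries username/credential alongside its URI
            servers.append({"urls": parts[0], "username": parts[1], "credential": parts[2]})
        else:
            # STUN entry needs no credentials
            servers.append({"urls": parts[0]})
    return servers


print(build_ice_servers(["stun:stun.example.com:3478",
                         "turn:turn.example.com:3478,user,secret"]))
```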
diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh
index acc91e7b7..c47a80ee8 100755
--- a/ee/api/clean-dev.sh
+++ b/ee/api/clean-dev.sh
@@ -35,6 +35,7 @@ rm -rf ./chalicelib/core/log_tool_stackdriver.py
 rm -rf ./chalicelib/core/log_tool_sumologic.py
 rm -rf ./chalicelib/core/metadata.py
 rm -rf ./chalicelib/core/mobile.py
+rm -rf ./chalicelib/core/sessions.py
 rm -rf ./chalicelib/core/sessions_assignments.py #exp
 rm -rf ./chalicelib/core/sessions_metas.py
 rm -rf ./chalicelib/core/sessions_mobs.py
@@ -78,9 +79,12 @@ rm -rf ./Dockerfile_bundle
 rm -rf ./entrypoint.bundle.sh
 rm -rf ./chalicelib/core/heatmaps.py
 rm -rf ./schemas.py
+rm -rf ./routers/subs/health.py
 rm -rf ./routers/subs/v1_api.py #exp
 rm -rf ./chalicelib/core/custom_metrics.py
 rm -rf ./chalicelib/core/performance_event.py
 rm -rf ./chalicelib/core/saved_search.py
 rm -rf ./app_alerts.py
 rm -rf ./build_alerts.sh
+rm -rf ./run-dev.sh
+rm -rf ./run-alerts-dev.sh
diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh
index ebd646a7d..e63d4e2af 100755
--- a/ee/api/entrypoint.sh
+++ b/ee/api/entrypoint.sh
@@ -2,4 +2,4 @@
 sh env_vars.sh
 source /tmp/.env.override
 
-uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
+uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers
diff --git a/ee/api/entrypoint_alerts.sh b/ee/api/entrypoint_alerts.sh
index acf8b390a..410015142 100755
--- a/ee/api/entrypoint_alerts.sh
+++ b/ee/api/entrypoint_alerts.sh
@@ -2,4 +2,4 @@ export ASSIST_KEY=ignore
 sh env_vars.sh
 source /tmp/.env.override
 
-uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload
+uvicorn app:app --host 0.0.0.0 --port 8888
diff --git a/ee/api/env.default b/ee/api/env.default
index cdbc3d256..ff82cb2af 100644
--- a/ee/api/env.default
+++ b/ee/api/env.default
@@ -11,6 +11,7 @@ LICENSE_KEY=
 S3_HOST=
 S3_KEY=
 S3_SECRET=
+S3_DISABLE_SSL_VERIFY=
 SAML2_MD_URL=
 SITE_URL=
 announcement_url=
@@ -50,7 +51,7 @@ ASSIST_RECORDS_BUCKET=records
 sessions_bucket=mobs
 sessions_region=us-east-1
 sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
+sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
 version_number=1.0.0
 FS_DIR=/mnt/efs
 EXP_SESSIONS_SEARCH=false
@@ -70,4 +71,7 @@ SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
 DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
 PRESIGNED_URL_EXPIRATION=3600
 ASSIST_JWT_EXPIRATION=144000
-ASSIST_JWT_SECRET=
\ No newline at end of file
+ASSIST_JWT_SECRET=
+REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379
+KAFKA_SERVERS=kafka.db.svc.cluster.local:9092
+KAFKA_USE_SSL=false
\ No newline at end of file
diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt
index 250882623..b6f49f6c1 100644
--- a/ee/api/requirements-alerts.txt
+++ b/ee/api/requirements-alerts.txt
@@ -1,18 +1,18 @@
 requests==2.28.2
-urllib3==1.26.14
-boto3==1.26.70
+urllib3==1.26.15
+boto3==1.26.100
 pyjwt==2.6.0
 psycopg2-binary==2.9.5
-elasticsearch==8.6.1
-jira==3.4.1
+elasticsearch==8.6.2
+jira==3.5.0
 
-fastapi==0.92.0
-uvicorn[standard]==0.20.0
-python-decouple==3.7
-pydantic[email]==1.10.4
-apscheduler==3.10.0
+fastapi==0.95.0
+uvicorn[standard]==0.21.1
+python-decouple==3.8
+pydantic[email]==1.10.7
+apscheduler==3.10.1
 
 clickhouse-driver==0.2.5
 python-multipart==0.0.5
\ No newline at end of file
diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt
index 5f3742cdd..616a3f9d4 100644
--- a/ee/api/requirements-crons.txt
+++ b/ee/api/requirements-crons.txt
@@ -1,13 +1,13 @@
 requests==2.28.2
-urllib3==1.26.14
-boto3==1.26.70
+urllib3==1.26.15
+boto3==1.26.100
 pyjwt==2.6.0
 psycopg2-binary==2.9.5
-elasticsearch==8.6.1
-jira==3.4.1
+elasticsearch==8.6.2
+jira==3.5.0
 
-apscheduler==3.10.0
+apscheduler==3.10.1
 
 clickhouse-driver==0.2.5
\ No newline at end of file
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index c8b76e700..d6a3a0dc1 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -1,19 +1,22 @@
 requests==2.28.2
-urllib3==1.26.14
-boto3==1.26.70
+urllib3==1.26.15
+boto3==1.26.100
 pyjwt==2.6.0
 psycopg2-binary==2.9.5
-elasticsearch==8.6.1
-jira==3.4.1
+elasticsearch==8.6.2
+jira==3.5.0
 
-fastapi==0.92.0
-uvicorn[standard]==0.20.0
-python-decouple==3.7
-pydantic[email]==1.10.4
-apscheduler==3.10.0
+fastapi==0.95.0
+uvicorn[standard]==0.21.1
+python-decouple==3.8
+pydantic[email]==1.10.7
+apscheduler==3.10.1
 
 clickhouse-driver==0.2.5
 python3-saml==1.15.0
-python-multipart==0.0.5
+python-multipart==0.0.6
+
+redis==4.5.3
+#confluent-kafka==2.0.2
\ No newline at end of file
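`env.default` now ships Redis and Kafka connection defaults, matching the new `redis==4.5.3` pin (Kafka support appears staged, since its client stays commented out in requirements.txt). A hedged sketch of how such settings might be consumed with the `python-decouple` and `redis` packages this diff pins; the variable names follow `env.default`, the wiring itself is illustrative:

```python
from decouple import config
import redis

REDIS_STRING = config("REDIS_STRING", default="redis://localhost:6379")
KAFKA_SERVERS = config("KAFKA_SERVERS", default="localhost:9092")
KAFKA_USE_SSL = config("KAFKA_USE_SSL", cast=bool, default=False)

# redis-py understands the redis:// URL format used in env.default
client = redis.Redis.from_url(REDIS_STRING)
client.ping()
```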
Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) +async def get_session_events(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) + if data is None: + return {"errors": ["session not found"]} + + return { + 'data': data + } + + @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], dependencies=[OR_scope(Permissions.dev_tools)]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, @@ -250,8 +293,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/ee/api/routers/saml.py b/ee/api/routers/saml.py index ee0f0333b..cf52aa720 100644 --- a/ee/api/routers/saml.py +++ b/ee/api/routers/saml.py @@ -1,5 +1,4 @@ -from fastapi import HTTPException -from fastapi import Request, Response +from fastapi import HTTPException, Request, Response, status from chalicelib.utils import SAML2_helper from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth @@ -12,7 +11,6 @@ from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request from chalicelib.core import users, tenants, roles from starlette.responses import RedirectResponse -from starlette import status @public_app.get("/sso/saml2", tags=["saml2"]) diff --git a/ee/api/run-dev.sh b/ee/api/run-dev.sh deleted file mode 100755 index 76682286d..000000000 --- a/ee/api/run-dev.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/zsh - -uvicorn app:app --reload \ No newline at end of file diff --git a/ee/utilities/.gitignore b/ee/assist/.gitignore similarity index 93% rename from ee/utilities/.gitignore rename to ee/assist/.gitignore index cd68b1ffb..98f9f8386 100644 --- a/ee/utilities/.gitignore +++ b/ee/assist/.gitignore @@ -11,6 +11,7 @@ servers/peerjs-server.js servers/sourcemaps-handler.js servers/sourcemaps-server.js /utils/geoIP.js +/utils/health.js /utils/HeapSnapshot.js /utils/helper.js /utils/assistHelper.js diff --git a/ee/utilities/Dockerfile b/ee/assist/Dockerfile similarity index 100% rename from ee/utilities/Dockerfile rename to ee/assist/Dockerfile diff --git a/ee/utilities/clean-dev.sh b/ee/assist/clean-dev.sh similarity index 90% rename from ee/utilities/clean-dev.sh rename 
to ee/assist/clean-dev.sh index ec1aaeae4..eff1f6308 100755 --- a/ee/utilities/clean-dev.sh +++ b/ee/assist/clean-dev.sh @@ -1,4 +1,5 @@ rm -rf ./utils/geoIP.js +rm -rf ./utils/health.js rm -rf ./utils/HeapSnapshot.js rm -rf ./utils/helper.js rm -rf ./utils/assistHelper.js diff --git a/ee/utilities/package-lock.json b/ee/assist/package-lock.json similarity index 97% rename from ee/utilities/package-lock.json rename to ee/assist/package-lock.json index 1d74677cf..a94f1d5bb 100644 --- a/ee/utilities/package-lock.json +++ b/ee/assist/package-lock.json @@ -1,12 +1,12 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "@maxmind/geoip2-node": "^3.5.0", @@ -38,9 +38,9 @@ } }, "node_modules/@redis/client": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.5.tgz", - "integrity": "sha512-fuMnpDYSjT5JXR9rrCW1YWA4L8N/9/uS4ImT3ZEC/hcaQRI1D/9FvwjriRj1UvepIgzZXthFVKMNRzP/LNL7BQ==", + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.6.tgz", + "integrity": "sha512-dFD1S6je+A47Lj22jN/upVU2fj4huR7S9APd7/ziUXsIXDL+11GPYti4Suv5y8FuXaN+0ZG4JF+y1houEJ7ToA==", "dependencies": { "cluster-key-slot": "1.1.2", "generic-pool": "3.9.0", @@ -67,9 +67,9 @@ } }, "node_modules/@redis/search": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.1.tgz", - "integrity": "sha512-pqCXTc5e7wJJgUuJiC3hBgfoFRoPxYzwn0BEfKgejTM7M/9zP3IpUcqcjgfp8hF+LoV8rHZzcNTz7V+pEIY7LQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.2.tgz", + "integrity": "sha512-/cMfstG/fOh/SsE+4/BQGeuH/JJloeWuH+qJzM8dbxuWvdWibWAOAHHCZTMPhV3xIlH4/cUEIA8OV5QnYpaVoA==", "peerDependencies": { "@redis/client": "^1.0.0" } @@ -117,9 +117,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", - "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" + "version": "18.15.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz", + "integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==" }, "node_modules/accepts": { "version": "1.3.8", @@ -878,15 +878,15 @@ } }, "node_modules/redis": { - "version": "4.6.4", - "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.4.tgz", - "integrity": "sha512-wi2tgDdQ+Q8q+PR5FLRx4QvDiWaA+PoJbrzsyFqlClN5R4LplHqN3scs/aGjE//mbz++W19SgxiEnQ27jnCRaA==", + "version": "4.6.5", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.5.tgz", + "integrity": "sha512-O0OWA36gDQbswOdUuAhRL6mTZpHFN525HlgZgDaVNgCJIAZR3ya06NTESb0R+TUZ+BFaDpz6NnnVvoMx9meUFg==", "dependencies": { "@redis/bloom": "1.2.0", - "@redis/client": "1.5.5", + "@redis/client": "1.5.6", "@redis/graph": "1.1.0", "@redis/json": "1.0.4", - "@redis/search": "1.1.1", + "@redis/search": "1.1.2", "@redis/time-series": "1.0.4" } }, @@ -1085,9 +1085,9 @@ } }, "node_modules/ua-parser-js": { - "version": "1.0.33", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz", - "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==", + "version": "1.0.34", + "resolved": 
"https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz", + "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==", "funding": [ { "type": "opencollective", diff --git a/ee/utilities/package.json b/ee/assist/package.json similarity index 97% rename from ee/utilities/package.json rename to ee/assist/package.json index 3fcedf03b..4ef88774a 100644 --- a/ee/utilities/package.json +++ b/ee/assist/package.json @@ -1,6 +1,6 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/ee/utilities/prepare-dev.sh b/ee/assist/prepare-dev.sh similarity index 75% rename from ee/utilities/prepare-dev.sh rename to ee/assist/prepare-dev.sh index 2daecbfc1..8da98eac3 100755 --- a/ee/utilities/prepare-dev.sh +++ b/ee/assist/prepare-dev.sh @@ -1,2 +1,2 @@ #!/bin/bash -rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./ \ No newline at end of file +rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../assist/* ./ \ No newline at end of file diff --git a/utilities/run-dev.sh b/ee/assist/run-dev.sh similarity index 100% rename from utilities/run-dev.sh rename to ee/assist/run-dev.sh diff --git a/ee/utilities/server.js b/ee/assist/server.js similarity index 89% rename from ee/utilities/server.js rename to ee/assist/server.js index 482ddcd17..414d7ac2c 100644 --- a/ee/utilities/server.js +++ b/ee/assist/server.js @@ -1,6 +1,7 @@ const dumps = require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); const express = require('express'); +const health = require("./utils/health"); const assert = require('assert').strict; let socket; @@ -14,7 +15,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0'; const PORT = process.env.LISTEN_PORT || 9001; assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required'); const P_KEY = process.env.ASSIST_KEY; -const PREFIX = process.env.PREFIX || process.env.prefix || `/assist` +const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`; let debug = process.env.debug === "1"; const heapdump = process.env.heapdump === "1"; @@ -31,18 +32,11 @@ if (process.env.uws !== "true") { ); heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter); - wsapp.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } - ); wsapp.enable('trust proxy'); const wsserver = wsapp.listen(PORT, HOST, () => { console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); socket.start(wsserver); @@ -102,13 +96,6 @@ if (process.env.uws !== "true") { uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); - uapp.get('/private/shutdown', (res, req) => { - console.log("Requested shutdown"); - res.writeStatus('200 OK').end("ok!"); - process.kill(1, "SIGTERM"); - } - ); - socket.start(uapp); uapp.listen(HOST, PORT, (token) => { @@ -116,7 +103,7 @@ if (process.env.uws !== "true") { console.warn("port already in use"); } console.log(`WS App listening on 
http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/assist/servers/websocket-cluster.js similarity index 99% rename from ee/utilities/servers/websocket-cluster.js rename to ee/assist/servers/websocket-cluster.js index e129bfcb6..a1f389685 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/assist/servers/websocket-cluster.js @@ -34,7 +34,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -43,7 +43,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] diff --git a/ee/utilities/servers/websocket.js b/ee/assist/servers/websocket.js similarity index 98% rename from ee/utilities/servers/websocket.js rename to ee/assist/servers/websocket.js index c906b5987..330361df3 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/assist/servers/websocket.js @@ -29,7 +29,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -38,7 +38,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] diff --git a/ee/utilities/utils/helper-ee.js b/ee/assist/utils/helper-ee.js similarity index 100% rename from ee/utilities/utils/helper-ee.js rename to ee/assist/utils/helper-ee.js diff --git a/ee/backend/internal/db/datasaver/fts.go b/ee/backend/internal/db/datasaver/fts.go index 1ff546d27..3c049acae 100644 --- a/ee/backend/internal/db/datasaver/fts.go +++ b/ee/backend/internal/db/datasaver/fts.go @@ -18,6 +18,20 @@ type NetworkRequestFTS struct { Duration uint64 `json:"duration"` } +func WrapNetworkRequest(m *messages.NetworkRequest, projID uint32) *NetworkRequestFTS { + return &NetworkRequestFTS{ + SessionID: m.SessionID(), + ProjectID: projID, + Method: m.Method, + URL: m.URL, + Request: m.Request, + Response: m.Response, + Status: m.Status, + Timestamp: m.Timestamp, + Duration: m.Duration, + } +} + type PageEventFTS struct { SessionID uint64 `json:"session_id"` ProjectID uint32 `json:"project_id"` @@ -40,6 +54,30 @@ type PageEventFTS struct { TimeToInteractive uint64 `json:"time_to_interactive"` } +func WrapPageEvent(m *messages.PageEvent, projID uint32) *PageEventFTS { + return &PageEventFTS{ + SessionID: m.SessionID(), + ProjectID: projID, + MessageID: m.MessageID, + Timestamp: m.Timestamp, + URL: m.URL, + Referrer: m.Referrer, + Loaded: m.Loaded, + RequestStart: m.RequestStart, + ResponseStart: m.ResponseStart, + ResponseEnd: m.ResponseEnd, + 
DomContentLoadedEventStart: m.DomContentLoadedEventStart, + DomContentLoadedEventEnd: m.DomContentLoadedEventEnd, + LoadEventStart: m.LoadEventStart, + LoadEventEnd: m.LoadEventEnd, + FirstPaint: m.FirstPaint, + FirstContentfulPaint: m.FirstContentfulPaint, + SpeedIndex: m.SpeedIndex, + VisuallyComplete: m.VisuallyComplete, + TimeToInteractive: m.TimeToInteractive, + } +} + type GraphQLFTS struct { SessionID uint64 `json:"session_id"` ProjectID uint32 `json:"project_id"` @@ -49,68 +87,46 @@ type GraphQLFTS struct { Response string `json:"response"` } -func (s *Saver) SendToFTS(msg messages.Message, projID uint32) { +func WrapGraphQL(m *messages.GraphQL, projID uint32) *GraphQLFTS { + return &GraphQLFTS{ + SessionID: m.SessionID(), + ProjectID: projID, + OperationKind: m.OperationKind, + OperationName: m.OperationName, + Variables: m.Variables, + Response: m.Response, + } +} + +func (s *saverImpl) sendToFTS(msg messages.Message) { // Skip, if FTS is disabled if s.producer == nil { return } - var ( - event []byte - err error + projID uint32 + event []byte + err error ) + if sess, err := s.pg.Cache.GetSession(msg.SessionID()); err == nil { + projID = sess.ProjectID + } + switch m := msg.(type) { // Common case *messages.NetworkRequest: - event, err = json.Marshal(NetworkRequestFTS{ - SessionID: msg.SessionID(), - ProjectID: projID, - Method: m.Method, - URL: m.URL, - Request: m.Request, - Response: m.Response, - Status: m.Status, - Timestamp: m.Timestamp, - Duration: m.Duration, - }) + event, err = json.Marshal(WrapNetworkRequest(m, projID)) case *messages.PageEvent: - event, err = json.Marshal(PageEventFTS{ - SessionID: msg.SessionID(), - ProjectID: projID, - MessageID: m.MessageID, - Timestamp: m.Timestamp, - URL: m.URL, - Referrer: m.Referrer, - Loaded: m.Loaded, - RequestStart: m.RequestStart, - ResponseStart: m.ResponseStart, - ResponseEnd: m.ResponseEnd, - DomContentLoadedEventStart: m.DomContentLoadedEventStart, - DomContentLoadedEventEnd: m.DomContentLoadedEventEnd, - LoadEventStart: m.LoadEventStart, - LoadEventEnd: m.LoadEventEnd, - FirstPaint: m.FirstPaint, - FirstContentfulPaint: m.FirstContentfulPaint, - SpeedIndex: m.SpeedIndex, - VisuallyComplete: m.VisuallyComplete, - TimeToInteractive: m.TimeToInteractive, - }) + event, err = json.Marshal(WrapPageEvent(m, projID)) case *messages.GraphQL: - event, err = json.Marshal(GraphQLFTS{ - SessionID: msg.SessionID(), - ProjectID: projID, - OperationKind: m.OperationKind, - OperationName: m.OperationName, - Variables: m.Variables, - Response: m.Response, - }) + event, err = json.Marshal(WrapGraphQL(m, projID)) } if err != nil { log.Printf("can't marshal json for quickwit: %s", err) } else { if len(event) > 0 { - if err := s.producer.Produce(s.topic, msg.SessionID(), event); err != nil { + if err := s.producer.Produce(s.cfg.QuickwitTopic, msg.SessionID(), event); err != nil { log.Printf("can't send event to quickwit: %s", err) } } diff --git a/ee/backend/internal/db/datasaver/messages.go b/ee/backend/internal/db/datasaver/messages.go deleted file mode 100644 index 0a729ee63..000000000 --- a/ee/backend/internal/db/datasaver/messages.go +++ /dev/null @@ -1,114 +0,0 @@ -package datasaver - -import ( - "fmt" - "log" - . 
"openreplay/backend/pkg/messages" -) - -func (mi *Saver) InsertMessage(msg Message) error { - sessionID := msg.SessionID() - switch m := msg.(type) { - // Common - case *Metadata: - if err := mi.pg.InsertMetadata(sessionID, m); err != nil { - return fmt.Errorf("insert metadata err: %s", err) - } - return nil - case *IssueEvent: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertIssue(session, m); err != nil { - log.Printf("can't insert issue event into clickhouse: %s", err) - } - } - return mi.pg.InsertIssueEvent(sessionID, m) - //TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others - - // Web - case *SessionStart: - return mi.pg.HandleWebSessionStart(sessionID, m) - case *SessionEnd: - return mi.pg.HandleWebSessionEnd(sessionID, m) - case *UserID: - return mi.pg.InsertWebUserID(sessionID, m) - case *UserAnonymousID: - return mi.pg.InsertWebUserAnonymousID(sessionID, m) - case *CustomEvent: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertCustom(session, m); err != nil { - log.Printf("can't insert graphQL event into clickhouse: %s", err) - } - } - return mi.pg.InsertWebCustomEvent(sessionID, m) - case *ClickEvent: - return mi.pg.InsertWebClickEvent(sessionID, m) - case *InputEvent: - return mi.pg.InsertWebInputEvent(sessionID, m) - - // Unique Web messages - case *PageEvent: - return mi.pg.InsertWebPageEvent(sessionID, m) - case *JSException: - return mi.pg.InsertWebJSException(m) - case *IntegrationEvent: - return mi.pg.InsertWebIntegrationEvent(m) - case *NetworkRequest: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - project, err := mi.pg.GetProject(session.ProjectID) - if err != nil { - log.Printf("can't get project: %s", err) - } else { - if err := mi.ch.InsertRequest(session, m, project.SaveRequestPayloads); err != nil { - log.Printf("can't insert request event into clickhouse: %s", err) - } - } - } - return mi.pg.InsertWebNetworkRequest(sessionID, m) - case *GraphQL: - session, err := mi.pg.Cache.GetSession(sessionID) - if err != nil { - log.Printf("can't get session info for CH: %s", err) - } else { - if err := mi.ch.InsertGraphQL(session, m); err != nil { - log.Printf("can't insert graphQL event into clickhouse: %s", err) - } - } - return mi.pg.InsertWebGraphQL(sessionID, m) - case *SetPageLocation: - return mi.pg.InsertSessionReferrer(sessionID, m.Referrer) - - // IOS - case *IOSSessionStart: - return mi.pg.InsertIOSSessionStart(sessionID, m) - case *IOSSessionEnd: - return mi.pg.InsertIOSSessionEnd(sessionID, m) - case *IOSUserID: - return mi.pg.InsertIOSUserID(sessionID, m) - case *IOSUserAnonymousID: - return mi.pg.InsertIOSUserAnonymousID(sessionID, m) - case *IOSCustomEvent: - return mi.pg.InsertIOSCustomEvent(sessionID, m) - case *IOSClickEvent: - return mi.pg.InsertIOSClickEvent(sessionID, m) - case *IOSInputEvent: - return mi.pg.InsertIOSInputEvent(sessionID, m) - // Unique IOS messages - case *IOSNetworkCall: - return mi.pg.InsertIOSNetworkCall(sessionID, m) - case *IOSScreenEnter: - return mi.pg.InsertIOSScreenEnter(sessionID, m) - case *IOSCrash: - return mi.pg.InsertIOSCrash(sessionID, m) - - } - return nil // "Not implemented" -} diff --git a/ee/backend/internal/db/datasaver/methods.go 
b/ee/backend/internal/db/datasaver/methods.go new file mode 100644 index 000000000..ac0a8b88d --- /dev/null +++ b/ee/backend/internal/db/datasaver/methods.go @@ -0,0 +1,87 @@ +package datasaver + +import ( + "errors" + "log" + + "openreplay/backend/pkg/db/cache" + "openreplay/backend/pkg/db/clickhouse" + "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/env" + . "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/queue" +) + +func (s *saverImpl) init() { + s.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING")) + if err := s.ch.Prepare(); err != nil { + log.Fatalf("can't prepare clickhouse: %s", err) + } + s.pg.Conn.SetClickHouse(s.ch) + if s.cfg.UseQuickwit { + s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true) + } +} + +func (s *saverImpl) handleExtraMessage(msg Message) error { + // Send data to quickwit + s.sendToFTS(msg) + + // Get session data + var ( + session *types.Session + err error + ) + if msg.TypeID() == MsgSessionEnd { + session, err = s.pg.GetSession(msg.SessionID()) + } else { + session, err = s.pg.Cache.GetSession(msg.SessionID()) + } + if session == nil { + if err != nil && !errors.Is(err, cache.NilSessionInCacheError) { + log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) + } + return err + } + + // Handle message + switch m := msg.(type) { + case *SessionEnd: + return s.ch.InsertWebSession(session) + case *PerformanceTrackAggr: + return s.ch.InsertWebPerformanceTrackAggr(session, m) + case *MouseClick: + return s.ch.InsertWebClickEvent(session, m) + case *InputEvent: + return s.ch.InsertWebInputEvent(session, m) + // Unique for Web + case *PageEvent: + return s.ch.InsertWebPageEvent(session, m) + case *ResourceTiming: + return s.ch.InsertWebResourceEvent(session, m) + case *JSException: + return s.ch.InsertWebErrorEvent(session, types.WrapJSException(m)) + case *IntegrationEvent: + return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)) + case *IssueEvent: + return s.ch.InsertIssue(session, m) + case *CustomEvent: + return s.ch.InsertCustom(session, m) + case *NetworkRequest: + project, err := s.pg.GetProject(session.ProjectID) + if err != nil { + log.Printf("can't get project: %s", err) + } else { + if err := s.ch.InsertRequest(session, m, project.SaveRequestPayloads); err != nil { + log.Printf("can't insert request event into clickhouse: %s", err) + } + } + case *GraphQL: + return s.ch.InsertGraphQL(session, m) + case *InputChange: + return s.ch.InsertWebInputDuration(session, m) + case *MouseThrashing: + return s.ch.InsertMouseThrashing(session, m) + } + return nil +} diff --git a/ee/backend/internal/db/datasaver/saver.go b/ee/backend/internal/db/datasaver/saver.go deleted file mode 100644 index e05e502f1..000000000 --- a/ee/backend/internal/db/datasaver/saver.go +++ /dev/null @@ -1,24 +0,0 @@ -package datasaver - -import ( - "openreplay/backend/internal/config/db" - "openreplay/backend/pkg/db/cache" - "openreplay/backend/pkg/db/clickhouse" - "openreplay/backend/pkg/queue" - "openreplay/backend/pkg/queue/types" -) - -type Saver struct { - pg *cache.PGCache - ch clickhouse.Connector - producer types.Producer - topic string -} - -func New(pg *cache.PGCache, cfg *db.Config) *Saver { - var producer types.Producer = nil - if cfg.UseQuickwit { - producer = queue.NewProducer(cfg.MessageSizeLimit, true) - } - return &Saver{pg: pg, producer: producer, topic: cfg.QuickwitTopic} -} diff --git a/ee/backend/internal/db/datasaver/stats.go 
b/ee/backend/internal/db/datasaver/stats.go deleted file mode 100644 index 049c319bd..000000000 --- a/ee/backend/internal/db/datasaver/stats.go +++ /dev/null @@ -1,56 +0,0 @@ -package datasaver - -import ( - "log" - "openreplay/backend/pkg/db/clickhouse" - "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/env" - "openreplay/backend/pkg/messages" -) - -func (si *Saver) InitStats() { - si.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING")) - if err := si.ch.Prepare(); err != nil { - log.Fatalf("Clickhouse prepare error: %v\n", err) - } - si.pg.Conn.SetClickHouse(si.ch) -} - -func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error { - // Send data to quickwit - if sess, err := si.pg.Cache.GetSession(msg.SessionID()); err != nil { - si.SendToFTS(msg, 0) - } else { - si.SendToFTS(msg, sess.ProjectID) - } - - switch m := msg.(type) { - // Web - case *messages.SessionEnd: - return si.ch.InsertWebSession(session) - case *messages.PerformanceTrackAggr: - return si.ch.InsertWebPerformanceTrackAggr(session, m) - case *messages.ClickEvent: - return si.ch.InsertWebClickEvent(session, m) - case *messages.InputEvent: - return si.ch.InsertWebInputEvent(session, m) - // Unique for Web - case *messages.PageEvent: - return si.ch.InsertWebPageEvent(session, m) - case *messages.ResourceEvent: - return si.ch.InsertWebResourceEvent(session, m) - case *messages.JSException: - return si.ch.InsertWebErrorEvent(session, types.WrapJSException(m)) - case *messages.IntegrationEvent: - return si.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)) - } - return nil -} - -func (si *Saver) CommitStats() error { - return si.ch.Commit() -} - -func (si *Saver) Close() error { - return si.ch.Stop() -} diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go index b872adcc2..12ed1fb3f 100644 --- a/ee/backend/pkg/db/clickhouse/connector.go +++ b/ee/backend/pkg/db/clickhouse/connector.go @@ -10,6 +10,7 @@ import ( "openreplay/backend/pkg/hashid" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/url" + "os" "strings" "time" @@ -21,9 +22,9 @@ type Connector interface { Commit() error Stop() error InsertWebSession(session *types.Session) error - InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error + InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error - InsertWebClickEvent(session *types.Session, msg *messages.ClickEvent) error + InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error InsertWebInputEvent(session *types.Session, msg *messages.InputEvent) error InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error @@ -32,6 +33,8 @@ type Connector interface { InsertCustom(session *types.Session, msg *messages.CustomEvent) error InsertGraphQL(session *types.Session, msg *messages.GraphQL) error InsertIssue(session *types.Session, msg *messages.IssueEvent) error + InsertWebInputDuration(session *types.Session, msg *messages.InputChange) error + InsertMouseThrashing(session *types.Session, msg *messages.MouseThrashing) error } type task struct { @@ -50,14 +53,25 @@ type connectorImpl struct { finished chan struct{} } +func getEnv(key, fallback string) string { + if value, ok := os.LookupEnv(key); ok { + return value + } + return fallback +} + func NewConnector(url string) 
Connector { license.CheckLicense() url = strings.TrimPrefix(url, "tcp://") url = strings.TrimSuffix(url, "/default") + userName := getEnv("CH_USERNAME", "default") + password := getEnv("CH_PASSWORD", "") conn, err := clickhouse.Open(&clickhouse.Options{ Addr: []string{url}, Auth: clickhouse.Auth{ Database: "default", + Username: userName, + Password: password, }, MaxOpenConns: 20, MaxIdleConns: 15, @@ -65,7 +79,6 @@ func NewConnector(url string) Connector { Compression: &clickhouse.Compression{ Method: clickhouse.CompressionLZ4, }, - // Debug: true, }) if err != nil { log.Fatal(err) @@ -97,7 +110,7 @@ var batches = map[string]string{ "autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, ?)", "pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", - "inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)", + "inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", "errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "performance": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "requests": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)", @@ -147,9 +160,7 @@ func (c *connectorImpl) worker() { for { select { case t := <-c.workerTask: - start := time.Now() c.sendBulks(t) - log.Printf("ch bulks dur: %d", time.Now().Sub(start).Milliseconds()) case <-c.done: for t := range c.workerTask { c.sendBulks(t) @@ -166,11 +177,59 @@ func (c *connectorImpl) checkError(name string, err error) { } } +func (c *connectorImpl) InsertWebInputDuration(session *types.Session, msg *messages.InputChange) error { + if msg.Label == "" { + return nil + } + if err := c.batches["inputs"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MsgID(), + datetime(msg.Timestamp), + msg.Label, + "INPUT", + nullableUint16(uint16(msg.InputDuration)), + nullableUint32(uint32(msg.HesitationTime)), + ); err != nil { + c.checkError("inputs", err) + return fmt.Errorf("can't append to inputs batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertMouseThrashing(session *types.Session, msg *messages.MouseThrashing) error { + issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp) + // Insert issue event 
to batches + if err := c.batches["issuesEvents"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MsgID(), + datetime(msg.Timestamp), + issueID, + "mouse_thrashing", + "ISSUE", + msg.Url, + ); err != nil { + c.checkError("issuesEvents", err) + return fmt.Errorf("can't append to issuesEvents batch: %s", err) + } + if err := c.batches["issues"].Append( + uint16(session.ProjectID), + issueID, + "mouse_thrashing", + msg.Url, + ); err != nil { + c.checkError("issues", err) + return fmt.Errorf("can't append to issues batch: %s", err) + } + return nil +} + func (c *connectorImpl) InsertIssue(session *types.Session, msg *messages.IssueEvent) error { issueID := hashid.IssueID(session.ProjectID, msg) // Check issue type before insert to avoid panic from clickhouse lib switch msg.Type { - case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception": + case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing": default: return fmt.Errorf("unknown issueType: %s", msg.Type) } @@ -242,28 +301,25 @@ func (c *connectorImpl) InsertWebSession(session *types.Session) error { return nil } -func (c *connectorImpl) InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error { - var method interface{} = url.EnsureMethod(msg.Method) - if method == "" { - method = nil - } - resourceType := url.EnsureType(msg.Type) +func (c *connectorImpl) InsertWebResourceEvent(session *types.Session, msg *messages.ResourceTiming) error { + msgType := url.GetResourceType(msg.Initiator, msg.URL) + resourceType := url.EnsureType(msgType) if resourceType == "" { - return fmt.Errorf("can't parse resource type, sess: %s, type: %s", session.SessionID, msg.Type) + return fmt.Errorf("can't parse resource type, sess: %d, type: %s", session.SessionID, msgType) } if err := c.batches["resources"].Append( session.SessionID, uint16(session.ProjectID), - msg.MessageID, + msg.MsgID(), datetime(msg.Timestamp), url.DiscardURLQuery(msg.URL), - msg.Type, + msgType, nullableUint16(uint16(msg.Duration)), nullableUint16(uint16(msg.TTFB)), nullableUint16(uint16(msg.HeaderSize)), nullableUint32(uint32(msg.EncodedBodySize)), nullableUint32(uint32(msg.DecodedBodySize)), - msg.Success, + msg.Duration != 0, ); err != nil { c.checkError("resources", err) return fmt.Errorf("can't append to resources batch: %s", err) @@ -298,14 +354,14 @@ func (c *connectorImpl) InsertWebPageEvent(session *types.Session, msg *messages return nil } -func (c *connectorImpl) InsertWebClickEvent(session *types.Session, msg *messages.ClickEvent) error { +func (c *connectorImpl) InsertWebClickEvent(session *types.Session, msg *messages.MouseClick) error { if msg.Label == "" { return nil } if err := c.batches["clicks"].Append( session.SessionID, uint16(session.ProjectID), - msg.MessageID, + msg.MsgID(), datetime(msg.Timestamp), msg.Label, nullableUint32(uint32(msg.HesitationTime)), @@ -328,6 +384,8 @@ func (c *connectorImpl) InsertWebInputEvent(session *types.Session, msg *message datetime(msg.Timestamp), msg.Label, "INPUT", + nil, + nil, ); err != nil { 
c.checkError("inputs", err) return fmt.Errorf("can't append to inputs batch: %s", err) diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py index 54f8df955..663ba8f99 100644 --- a/ee/connectors/msgcodec/messages.py +++ b/ee/connectors/msgcodec/messages.py @@ -315,35 +315,6 @@ class InputEvent(Message): self.label = label -class ClickEvent(Message): - __id__ = 33 - - def __init__(self, message_id, timestamp, hesitation_time, label, selector): - self.message_id = message_id - self.timestamp = timestamp - self.hesitation_time = hesitation_time - self.label = label - self.selector = selector - - -class ResourceEvent(Message): - __id__ = 35 - - def __init__(self, message_id, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, type, success, method, status): - self.message_id = message_id - self.timestamp = timestamp - self.duration = duration - self.ttfb = ttfb - self.header_size = header_size - self.encoded_body_size = encoded_body_size - self.decoded_body_size = decoded_body_size - self.url = url - self.type = type - self.success = success - self.method = method - self.status = status - - class CSSInsertRule(Message): __id__ = 37 @@ -470,14 +441,7 @@ class SetNodeAttributeDict(Message): self.value_key = value_key -class DOMDrop(Message): - __id__ = 52 - - def __init__(self, timestamp): - self.timestamp = timestamp - - -class ResourceTiming(Message): +class ResourceTimingDeprecated(Message): __id__ = 53 def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, initiator): @@ -744,6 +708,57 @@ class PartitionedMessage(Message): self.part_total = part_total +class InputChange(Message): + __id__ = 112 + + def __init__(self, id, value, value_masked, label, hesitation_time, input_duration): + self.id = id + self.value = value + self.value_masked = value_masked + self.label = label + self.hesitation_time = hesitation_time + self.input_duration = input_duration + + +class SelectionChange(Message): + __id__ = 113 + + def __init__(self, selection_start, selection_end, selection): + self.selection_start = selection_start + self.selection_end = selection_end + self.selection = selection + + +class MouseThrashing(Message): + __id__ = 114 + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class UnbindNodes(Message): + __id__ = 115 + + def __init__(self, total_removed_percent): + self.total_removed_percent = total_removed_percent + + +class ResourceTiming(Message): + __id__ = 116 + + def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, initiator, transferred_size, cached): + self.timestamp = timestamp + self.duration = duration + self.ttfb = ttfb + self.header_size = header_size + self.encoded_body_size = encoded_body_size + self.decoded_body_size = decoded_body_size + self.url = url + self.initiator = initiator + self.transferred_size = transferred_size + self.cached = cached + + class IssueEvent(Message): __id__ = 125 diff --git a/ee/connectors/msgcodec/msgcodec.py b/ee/connectors/msgcodec/msgcodec.py index 0ba21ea12..9aef2b475 100644 --- a/ee/connectors/msgcodec/msgcodec.py +++ b/ee/connectors/msgcodec/msgcodec.py @@ -321,31 +321,6 @@ class MessageCodec(Codec): label=self.read_string(reader) ) - if message_id == 33: - return ClickEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - hesitation_time=self.read_uint(reader), - label=self.read_string(reader), - selector=self.read_string(reader) - ) - - if 
message_id == 35: - return ResourceEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - duration=self.read_uint(reader), - ttfb=self.read_uint(reader), - header_size=self.read_uint(reader), - encoded_body_size=self.read_uint(reader), - decoded_body_size=self.read_uint(reader), - url=self.read_string(reader), - type=self.read_string(reader), - success=self.read_boolean(reader), - method=self.read_string(reader), - status=self.read_uint(reader) - ) - if message_id == 37: return CSSInsertRule( id=self.read_uint(reader), @@ -444,13 +419,8 @@ class MessageCodec(Codec): value_key=self.read_uint(reader) ) - if message_id == 52: - return DOMDrop( - timestamp=self.read_uint(reader) - ) - if message_id == 53: - return ResourceTiming( + return ResourceTimingDeprecated( timestamp=self.read_uint(reader), duration=self.read_uint(reader), ttfb=self.read_uint(reader), @@ -660,6 +630,47 @@ class MessageCodec(Codec): part_total=self.read_uint(reader) ) + if message_id == 112: + return InputChange( + id=self.read_uint(reader), + value=self.read_string(reader), + value_masked=self.read_boolean(reader), + label=self.read_string(reader), + hesitation_time=self.read_int(reader), + input_duration=self.read_int(reader) + ) + + if message_id == 113: + return SelectionChange( + selection_start=self.read_uint(reader), + selection_end=self.read_uint(reader), + selection=self.read_string(reader) + ) + + if message_id == 114: + return MouseThrashing( + timestamp=self.read_uint(reader) + ) + + if message_id == 115: + return UnbindNodes( + total_removed_percent=self.read_uint(reader) + ) + + if message_id == 116: + return ResourceTiming( + timestamp=self.read_uint(reader), + duration=self.read_uint(reader), + ttfb=self.read_uint(reader), + header_size=self.read_uint(reader), + encoded_body_size=self.read_uint(reader), + decoded_body_size=self.read_uint(reader), + url=self.read_string(reader), + initiator=self.read_string(reader), + transferred_size=self.read_uint(reader), + cached=self.read_boolean(reader) + ) + if message_id == 125: return IssueEvent( message_id=self.read_uint(reader), diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql new file mode 100644 index 000000000..8191f8fd1 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql @@ -0,0 +1,86 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; + +ALTER TABLE experimental.events + MODIFY COLUMN issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)); + +ALTER TABLE experimental.issues + MODIFY COLUMN type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20); + +DROP TABLE IF EXISTS experimental.js_errors_sessions_mv; +DROP TABLE IF EXISTS experimental.events_l7d_mv; + +ALTER TABLE experimental.events + DROP COLUMN IF EXISTS container_id, + DROP COLUMN IF EXISTS container_name, + DROP COLUMN IF EXISTS 
container_src, + DROP COLUMN IF EXISTS container_type; + + +CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv + ENGINE = ReplacingMergeTree(_timestamp) + PARTITION BY toYYYYMMDD(datetime) + ORDER BY (project_id, datetime, event_type, session_id, message_id) + TTL datetime + INTERVAL 7 DAY + POPULATE +AS +SELECT session_id, + project_id, + event_type, + datetime, + label, + hesitation_time, + name, + payload, + level, + source, + message, + error_id, + duration, + context, + url, + url_host, + url_path, + url_hostpath, + request_start, + response_start, + response_end, + dom_content_loaded_event_start, + dom_content_loaded_event_end, + load_event_start, + load_event_end, + first_paint, + first_contentful_paint_time, + speed_index, + visually_complete, + time_to_interactive, + ttfb, + ttlb, + response_time, + dom_building_time, + dom_content_loaded_event_time, + load_event_time, + min_fps, + avg_fps, + max_fps, + min_cpu, + avg_cpu, + max_cpu, + min_total_js_heap_size, + avg_total_js_heap_size, + max_total_js_heap_size, + min_used_js_heap_size, + avg_used_js_heap_size, + max_used_js_heap_size, + method, + status, + success, + request_body, + response_body, + issue_type, + issue_id, + error_tags_keys, + error_tags_values, + message_id, + _timestamp +FROM experimental.events +WHERE datetime >= now() - INTERVAL 7 DAY; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 9b2cfbbd1..f04e5d8c6 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -1,3 +1,4 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; CREATE DATABASE IF NOT EXISTS experimental; CREATE TABLE IF NOT EXISTS experimental.autocomplete @@ -27,10 +28,6 @@ CREATE TABLE IF NOT EXISTS experimental.events error_id Nullable(String), duration Nullable(UInt16), context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)), - container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)), - container_id Nullable(String), - container_name Nullable(String), - container_src Nullable(String), url Nullable(String), url_host Nullable(String) MATERIALIZED lower(domain(url)), url_path Nullable(String) MATERIALIZED lower(pathFull(url)), @@ -78,7 +75,7 @@ CREATE TABLE IF NOT EXISTS experimental.events success Nullable(UInt8), request_body Nullable(String), response_body Nullable(String), - issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)), + issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)), issue_id Nullable(String), error_tags_keys Array(String), error_tags_values Array(Nullable(String)), @@ -200,7 
+197,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues ( project_id UInt16, issue_id String, - type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), + type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20), context_string String, context_keys Array(String), context_values Array(Nullable(String)), @@ -231,10 +228,6 @@ SELECT session_id, error_id, duration, context, - container_type, - container_id, - container_name, - container_src, url, url_host, url_path, diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql new file mode 100644 index 000000000..6fd5c9dc7 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -0,0 +1,45 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.10.0-ee'; + next_version CONSTANT text := 'v1.11.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.11.0-ee' +$$ LANGUAGE sql IMMUTABLE; + +ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'mouse_thrashing'; + +LOCK TABLE ONLY events.inputs IN ACCESS EXCLUSIVE MODE; +ALTER TABLE IF EXISTS events.inputs + ADD COLUMN IF NOT EXISTS duration integer NULL, + ADD COLUMN IF NOT EXISTS hesitation integer NULL; + +LOCK TABLE ONLY events.clicks IN ACCESS EXCLUSIVE MODE; +ALTER TABLE IF EXISTS events.clicks + ADD COLUMN IF NOT EXISTS hesitation integer NULL; + +LOCK TABLE ONLY public.projects IN ACCESS EXCLUSIVE MODE; +ALTER TABLE IF EXISTS public.projects + ALTER COLUMN gdpr SET DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "obscured" + }'::jsonb; + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 0b2945b39..0418c5d7d 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -253,7 +253,7 @@ $$ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, - "defaultInputMode": "plain" + "defaultInputMode": "obscured" }'::jsonb, first_recorded_session_at timestamp without time zone NULL DEFAULT NULL, sessions_last_check_at timestamp without time zone NULL DEFAULT NULL, @@ -947,13 +947,14 @@ $$ CREATE TABLE IF NOT EXISTS events.clicks ( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, + 
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, path text, - selector text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS clicks_session_id_idx ON events.clicks (session_id); @@ -974,8 +975,10 @@ $$ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, message_id bigint NOT NULL, timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + duration integer DEFAULT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events.inputs (session_id); diff --git a/frontend/app/Router.js b/frontend/app/Router.js index 81fcaefc3..681f8a264 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -249,7 +249,7 @@ class Router extends React.Component { - {!existingTenant && } + {!isEnterprise && } diff --git a/frontend/app/api_middleware.js b/frontend/app/api_middleware.js index d00074945..9bfca6593 100644 --- a/frontend/app/api_middleware.js +++ b/frontend/app/api_middleware.js @@ -40,7 +40,7 @@ export default () => (next) => (action) => { }); }; -function parseError(e) { +export function parseError(e) { try { return [...JSON.parse(e).errors] || []; } catch { diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index 18c717cf4..71a117868 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -20,6 +20,9 @@ import ScreenRecorder from 'App/components/Session_/ScreenRecorder/ScreenRecorde function onReject() { toast.info(`Call was rejected.`); } +function onControlReject() { + toast.info('Remote control request was rejected by user') +} function onError(e: any) { console.log(e); @@ -52,6 +55,7 @@ function AssistActions({ setCallArgs, requestReleaseRemoteControl, toggleAnnotation, + setRemoteControlCallbacks }, toggleUserName, } = player @@ -153,6 +157,7 @@ function AssistActions({ }; const requestControl = () => { + setRemoteControlCallbacks({ onReject: onControlReject }) if (callRequesting || remoteRequesting) return; requestReleaseRemoteControl(); }; diff --git a/frontend/app/components/Client/Integrations/AssistDoc/AssistScript.tsx b/frontend/app/components/Client/Integrations/AssistDoc/AssistScript.tsx index 107a33351..28b55e6fa 100644 --- a/frontend/app/components/Client/Integrations/AssistDoc/AssistScript.tsx +++ b/frontend/app/components/Client/Integrations/AssistDoc/AssistScript.tsx @@ -23,7 +23,7 @@ function AssistScript(props) { r.issue=function(k,p){r.push([6,k,p])}; r.isActive=function(){return false}; r.getSessionToken=function(){}; -})(0, "${props.projectKey}", "//static.openreplay.com/3.4.9/openreplay-assist.js",1,28); +})(0, "${props.projectKey}", "${window.env.TRACKER_HOST || '//static.openreplay.com'}/${window.env.TRACKER_VERSION}/openreplay-assist.js", 1, 28); `} diff --git a/frontend/app/components/Client/Sites/NewSiteForm.js b/frontend/app/components/Client/Sites/NewSiteForm.js index 6ce0f7d4c..0527668c3 100644 --- a/frontend/app/components/Client/Sites/NewSiteForm.js +++ 
b/frontend/app/components/Client/Sites/NewSiteForm.js @@ -58,9 +58,7 @@ export default class NewSiteForm extends React.PureComponent { siteList, location: { pathname }, } = this.props; - if (!site.exists() && siteList.some(({ name }) => name === site.name)) { - return this.setState({ existsError: true }); - } + if (site.exists()) { this.props.update(this.props.site, this.props.site.id).then((response) => { if (!response || !response.errors || response.errors.size === 0) { @@ -72,11 +70,16 @@ export default class NewSiteForm extends React.PureComponent { } }); } else { - this.props.save(this.props.site).then(() => { - this.props.onClose(null); - this.props.clearSearch(); - this.props.clearSearchLive(); - this.props.mstore.initClient(); + this.props.save(this.props.site).then((response) => { + if (!response || !response.errors || response.errors.size === 0) { + this.props.onClose(null); + this.props.clearSearch(); + this.props.clearSearchLive(); + this.props.mstore.initClient(); + toast.success('Project added successfully'); + } else { + toast.error(response.errors[0]); + } }); } }; diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx index d1d4c84ef..7b4981d8e 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertsList.tsx @@ -3,9 +3,9 @@ import { NoContent, Pagination } from 'UI'; import { filterList } from 'App/utils'; import { sliceListPerPage } from 'App/utils'; import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; -import AlertListItem from './AlertListItem' -import { useStore } from 'App/mstore' -import { observer } from 'mobx-react-lite' +import AlertListItem from './AlertListItem'; +import { useStore } from 'App/mstore'; +import { observer } from 'mobx-react-lite'; const pageSize = 10; @@ -15,14 +15,19 @@ interface Props { function AlertsList({ siteId }: Props) { const { alertsStore, settingsStore } = useStore(); - const { fetchWebhooks, webhooks } = settingsStore - const { alerts: alertsList, alertsSearch, fetchList, init } = alertsStore + const { fetchWebhooks, webhooks } = settingsStore; + const { alerts: alertsList, alertsSearch, fetchList, init } = alertsStore; const page = alertsStore.page; - React.useEffect(() => { fetchList(); fetchWebhooks() }, []); - const alertsArray = alertsList + React.useEffect(() => { + fetchList(); + fetchWebhooks(); + }, []); + const alertsArray = alertsList; - const filteredAlerts = filterList(alertsArray, alertsSearch, ['name'], (item, query) => query.test(item.query.left)) + const filteredAlerts = filterList(alertsArray, alertsSearch, ['name'], (item, query) => + query.test(item.query.left) + ); const list = alertsSearch !== '' ? filteredAlerts : alertsArray; return ( @@ -31,9 +36,12 @@ function AlertsList({ siteId }: Props) { title={
-
+
             {alertsSearch !== '' ? 'No matching results' : "You haven't created any alerts yet"}
+
+            Alerts helps your team stay up to date with the activity on your app.
+
         }
       >
diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
index 544c86f8f..1c313b067 100644
--- a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
+++ b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx
@@ -38,10 +38,6 @@ function AlertsView({ siteId }: IAlertsView) {
-
-
-          Alerts helps your team stay up to date with the activity on your app.
-
       );
diff --git a/frontend/app/components/Dashboard/components/DashboardHeader/DashboardHeader.tsx b/frontend/app/components/Dashboard/components/DashboardHeader/DashboardHeader.tsx
index 5f312d124..072e2bf44 100644
--- a/frontend/app/components/Dashboard/components/DashboardHeader/DashboardHeader.tsx
+++ b/frontend/app/components/Dashboard/components/DashboardHeader/DashboardHeader.tsx
@@ -19,7 +19,7 @@ interface IProps {
 }
 type Props = IProps & RouteComponentProps;
-const MAX_CARDS = 30
+const MAX_CARDS = 29;
 function DashboardHeader(props: Props) {
   const { siteId, dashboardId } = props;
   const { dashboardStore } = useStore();
@@ -71,7 +71,7 @@ function DashboardHeader(props: Props) {
+          {dashboard?.name}
         }
@@ -118,7 +118,7 @@ function DashboardHeader(props: Props) {
{/* @ts-ignore */} - +

onEdit(false)} diff --git a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx index 18c96f12a..a4551f854 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/DashboardList.tsx @@ -17,19 +17,19 @@ function DashboardList() { show={lenth === 0} title={
+
{dashboardsSearch !== '' ? ( 'No matching results' ) : (
-
Create your first Dashboard
+
You haven't created any dashboards yet
- A dashboard lets you visualize trends and insights of data captured by OpenReplay. + A Dashboard is a collection of Cards that can be shared across teams.
)}
-
} > diff --git a/frontend/app/components/Dashboard/components/DashboardList/Header.tsx b/frontend/app/components/Dashboard/components/DashboardList/Header.tsx index 9a8e9de22..00fc42349 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/Header.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/Header.tsx @@ -34,10 +34,6 @@ function Header({ history, siteId }: { history: any; siteId: string }) {

-
- - A Dashboard is a collection of Cards that can be shared across teams. -
-
- - Create custom Cards to capture key interactions and track KPIs. -
-
- - + +
+ + metricStore.updateKey('filter', { ...filter, dashboard: value }) + } + /> +
+ +
+ + void; }) { const { metricStore, dashboardStore } = useStore(); - const metricsSearch = metricStore.metricsSearch; + const metricsSearch = metricStore.filter.query; const listView = useObserver(() => metricStore.listView); const [selectedMetrics, setSelectedMetrics] = useState([]); @@ -54,9 +54,12 @@ function MetricsList({ title={
-
+
{metricsSearch !== '' ? 'No matching results' : "You haven't created any cards yet"}
+
+ Create cards to capture key interactions and track KPIs. +
} > @@ -70,7 +73,7 @@ function MetricsList({ toggleSelection={toggleMetricSelection} allSelected={cards.length === selectedMetrics.length} toggleAll={({ target: { checked, name } }) => - setSelectedMetrics(checked ? cards.map((i: any) => i.metricId) : []) + setSelectedMetrics(checked ? cards.map((i: any) => i.metricId).slice(0, 30 - existingCardIds!.length) : []) } /> ) : ( diff --git a/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx index a560235e9..2d734e236 100644 --- a/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx +++ b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx @@ -64,7 +64,7 @@ function WidgetName(props: Props) { /> ) : ( // @ts-ignore - +
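// Note: the MAX_CARDS = 29 change in DashboardHeader above and the
// slice(0, 30 - existingCardIds!.length) cap here enforce the same
// dashboard-size rule in two places. A minimal sketch of that rule as a
// shared helper; the helper name and the 30-card ceiling are illustrative
// assumptions, not part of this patch:
const CARD_CEILING = 30;

function remainingCardSlots(existingCardIds: string[]): number {
  // Never return a negative slot count.
  return Math.max(0, CARD_CEILING - existingCardIds.length);
}

// toggleAll could then cap bulk selection as:
// setSelectedMetrics(checked ? cards.map((i: any) => i.metricId).slice(0, remainingCardSlots(existingCardIds!)) : []);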
setEditing(true)} className={ diff --git a/frontend/app/components/ForgotPassword/ForgotPassword.js b/frontend/app/components/ForgotPassword/ForgotPassword.js index 1cd40c679..376618074 100644 --- a/frontend/app/components/ForgotPassword/ForgotPassword.js +++ b/frontend/app/components/ForgotPassword/ForgotPassword.js @@ -14,21 +14,23 @@ const LOGIN = loginRoute(); const recaptchaRef = React.createRef(); const ERROR_DONT_MATCH = "Passwords don't match."; const MIN_LENGTH = 8; -const PASSWORD_POLICY = `Password should contain at least ${ MIN_LENGTH } symbols.`; +const PASSWORD_POLICY = `Password should contain at least ${MIN_LENGTH} symbols.`; const checkDontMatch = (newPassword, newPasswordRepeat) => newPasswordRepeat.length > 0 && newPasswordRepeat !== newPassword; @connect( (state, props) => ({ - errors: state.getIn([ 'user', 'requestResetPassowrd', 'errors' ]), - resetErrors: state.getIn([ 'user', 'resetPassword', 'errors' ]), - loading: state.getIn([ 'user', 'requestResetPassowrd', 'loading' ]) || state.getIn([ 'user', 'resetPassword', 'loading' ]), - params: new URLSearchParams(props.location.search) + errors: state.getIn(['user', 'requestResetPassowrd', 'errors']), + resetErrors: state.getIn(['user', 'resetPassword', 'errors']), + loading: + state.getIn(['user', 'requestResetPassowrd', 'loading']) || + state.getIn(['user', 'resetPassword', 'loading']), + params: new URLSearchParams(props.location.search), }), - { requestResetPassword, resetPassword, resetErrors }, + { requestResetPassword, resetPassword, resetErrors } ) -@withPageTitle("Password Reset - OpenReplay") +@withPageTitle('Password Reset - OpenReplay') @withRouter export default class ForgotPassword extends React.PureComponent { state = { @@ -45,15 +47,17 @@ export default class ForgotPassword extends React.PureComponent { const { email, password } = this.state; const { params } = this.props; - const pass = params.get('pass') - const invitation = params.get('invitation') - const resetting = pass && invitation + const pass = params.get('pass'); + const invitation = params.get('invitation'); + const resetting = pass && invitation; if (!resetting) { - this.props.requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token }).then(() => { - const { errors } = this.props; - if (!errors) this.setState({ requested: true }); - }); + this.props + .requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token }) + .then(() => { + const { errors } = this.props; + if (!errors) this.setState({ requested: true }); + }); } else { if (this.isSubmitDisabled()) return; this.props.resetPassword({ email: email.trim(), invitation, pass, password }).then(() => { @@ -61,16 +65,15 @@ export default class ForgotPassword extends React.PureComponent { if (!resetErrors) this.setState({ updated: true }); }); } - } + }; isSubmitDisabled() { const { password, passwordRepeat } = this.state; - if (password !== passwordRepeat || - password.length < MIN_LENGTH) return true; + if (password !== passwordRepeat || password.length < MIN_LENGTH) return true; return false; } - write = ({ target: { value, name } }) => this.setState({ [ name ]: value }) + write = ({ target: { value, name } }) => this.setState({ [name]: value }); shouldShouwPolicy() { const { password } = this.state; @@ -79,152 +82,172 @@ export default class ForgotPassword extends React.PureComponent { return true; } - onSubmit = e => { + onSubmit = (e) => { e.preventDefault(); const { CAPTCHA_ENABLED } = this.state; if (CAPTCHA_ENABLED && recaptchaRef.current) { - 
recaptchaRef.current.execute() + recaptchaRef.current.execute(); } else if (!CAPTCHA_ENABLED) { this.handleSubmit(); } - } + }; componentWillUnmount() { - this.props.resetErrors() + this.props.resetErrors(); } render() { const { CAPTCHA_ENABLED } = this.state; const { errors, loading, params } = this.props; const { requested, updated, password, passwordRepeat, email } = this.state; - const dontMatch = checkDontMatch(password, passwordRepeat); - - const pass = params.get('pass') - const invitation = params.get('invitation') - const resetting = pass && invitation - const validEmail = validateEmail(email) + const dontMatch = checkDontMatch(password, passwordRepeat); + + const pass = params.get('pass'); + const invitation = params.get('invitation'); + const resetting = pass && invitation; + const validEmail = validateEmail(email); return ( -
-
-
- +
+
+
+
-
-
-
Welcome Back!
-
-
-
-
-
-
-

{`${resetting ? 'Create' : 'Reset'} Password`}

-
- -
- { CAPTCHA_ENABLED && ( -
- this.handleSubmit(token) } - /> -
- )} +
+ {!resetting &&

Reset Password

} + {resetting &&

+ Welcome, join your organization by
creating a new password +

+ } - { !resetting && !requested && - - - - - } - - { - requested && !errors && ( -
Reset password link has been sent to your email.
- ) - } - - { - resetting && ( - - - - + + +
+ {CAPTCHA_ENABLED && ( +
+ this.handleSubmit(token)} /> - -
- { PASSWORD_POLICY }
+ )} + + {!resetting && !requested && ( - + - - ) - } + )} - -
- -
- { errors && -
- { errors.map(error => { error }
) } + {requested && !errors && ( +
+
+ +
+
Alright! A reset link was emailed to {email}. Click on it to reset your account password.
+
+ )} + + {resetting && ( + + + + {/* */} + + +
+ {PASSWORD_POLICY} +
+ + + + +
+ )} + + +
+ +
+ {errors && ( +
+ {errors.map((error) => ( + + {error} +
+
+ ))} +
+ )} +
+
+ {'Your password has been updated successfully.'}
+
- } -
- - { 'Your password has been updated sucessfully.' } -
-
- {/*
*/} - {!(updated || requested) && ( - - )} -
- - { updated && ()} -
{'Back to Login'}
- -
- {/*
*/} - + {!(updated || requested) && ( + + )} + +
+ + {updated && ( + + )} +
{'Back to Login'}
+ +
+ +
+
); diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 7ef0028c9..021f96df3 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -19,6 +19,7 @@ import UserMenu from './UserMenu'; import SettingsMenu from './SettingsMenu'; import DefaultMenuView from './DefaultMenuView'; import PreferencesView from './PreferencesView'; +import HealthStatus from './HealthStatus' const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); @@ -78,6 +79,8 @@ const Header = (props) => {
+ +
diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx new file mode 100644 index 000000000..4aace8e06 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx @@ -0,0 +1,43 @@ +import React from 'react'; +import { Icon } from 'UI'; +import cn from 'classnames' + +function Footer({ isSetup }: { isSetup?: boolean }) { + return ( + + ); +} + +export default Footer; diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx new file mode 100644 index 000000000..f8dc428a6 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -0,0 +1,148 @@ +import React from 'react'; +// @ts-ignore +import slide from 'App/svg/cheers.svg'; +import { Button, Loader } from 'UI'; +import Footer from './Footer'; +import { getHighest } from 'App/constants/zindex'; +import Category from 'Components/Header/HealthStatus/ServiceCategory'; +import SubserviceHealth from 'Components/Header/HealthStatus/SubserviceHealth/SubserviceHealth'; +import { IServiceStats } from '../HealthStatus'; + +function HealthModal({ + getHealth, + isLoading, + healthResponse, + setShowModal, + setPassed, +}: { + getHealth: () => void; + isLoading: boolean; + healthResponse: { overallHealth: boolean; healthMap: Record }; + setShowModal: (isOpen: boolean) => void; + setPassed?: () => void; +}) { + const [selectedService, setSelectedService] = React.useState(''); + + React.useEffect(() => { + if (!healthResponse?.overallHealth) { + if (healthResponse?.healthMap) { + setSelectedService( + Object.keys(healthResponse.healthMap).filter( + (s) => !healthResponse.healthMap[s].healthOk + )[0] + ); + } + } + }, [healthResponse]); + + const handleClose = () => { + setShowModal(false); + }; + + const isSetup = document.location.pathname.includes('/signup') + + return ( +
+
e.stopPropagation()} + className={'flex flex-col bg-white rounded border border-figmaColors-divider'} + > +
+
Installation Status
+ +
+ + +
+
+ {isLoading ? null + : Object.keys(healthResponse.healthMap).map((service) => ( + + setSelectedService(service)} + healthOk={healthResponse.healthMap[service].healthOk} + name={healthResponse.healthMap[service].name} + isSelectable + isSelected={selectedService === service} + /> + + ))} +
+
+ {isLoading ? null : selectedService ? ( + + ) : + } +
+
+ {isSetup ? ( +
+ +
+ ) : null} +
+
+
+
+ ); +} + +function ServiceStatus({ service }: { service: Record }) { + const { subservices } = service; + return ( +
+
+ {Object.keys(subservices).map((subservice: string) => ( + + + + ))} +
+
+ ); +} + +export default HealthModal; diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx new file mode 100644 index 000000000..64e3a8524 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -0,0 +1,92 @@ +import React from 'react'; +import { Icon } from 'UI'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import { lastAskedKey, healthResponseKey } from './const'; +import HealthWidget from "Components/Header/HealthStatus/HealthWidget"; +import { getHealthRequest } from './getHealth' + +export interface IServiceStats { + name: 'backendServices' | 'databases' | 'ingestionPipeline' | 'ssl'; + serviceName: string; + healthOk: boolean; + subservices: { + health: boolean; + details?: { + errors?: string[]; + version?: string; + } + }[] +} + + +function HealthStatus() { + const healthResponseSaved = localStorage.getItem(healthResponseKey) || '{}'; + const [healthResponse, setHealthResponse] = React.useState(JSON.parse(healthResponseSaved)); + const [isError, setIsError] = React.useState(false); + const [isLoading, setIsLoading] = React.useState(false); + const lastAskedSaved = localStorage.getItem(lastAskedKey); + const [lastAsked, setLastAsked] = React.useState(lastAskedSaved); + const [showModal, setShowModal] = React.useState(false); + + const getHealth = async () => { + if (isLoading) return; + try { + setIsLoading(true); + const { healthMap, asked } = await getHealthRequest(); + setHealthResponse(healthMap); + setLastAsked(asked.toString()); + } catch (e) { + console.error(e); + setIsError(true); + } finally { + setIsLoading(false); + } + }; + + React.useEffect(() => { + const now = new Date(); + const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; + const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; + const diffInMinutes = Math.round(diff / 1000 / 60); + if (Object.keys(healthResponse).length === 0 || !lastAskedDate || diffInMinutes > 10) { + void getHealth(); + } + }, []); + + const icon = !isError && healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); + return ( + <> +
+
+
+ +
+
+ + +
+ {showModal ? ( + + ) : null} + + ); +} + + +export default HealthStatus; diff --git a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx new file mode 100644 index 000000000..8cccef697 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx @@ -0,0 +1,98 @@ +import React from 'react' +import { Icon } from "UI"; +import ServiceCategory from "Components/Header/HealthStatus/ServiceCategory"; +import cn from 'classnames' +import { IServiceStats } from './HealthStatus' + +function HealthWidget({ + healthResponse, + getHealth, + isLoading, + lastAsked, + setShowModal, + isError, +}: { + healthResponse: { overallHealth: boolean; healthMap: Record }; + getHealth: Function; + isLoading: boolean; + lastAsked: string | null; + setShowModal: (visible: boolean) => void; + isError?: boolean; +}) { + const [lastAskedDiff, setLastAskedDiff] = React.useState(0); + const healthOk = healthResponse?.overallHealth; + + React.useEffect(() => { + const now = new Date(); + const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; + const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; + const diffInMinutes = Math.round(diff / 1000 / 60); + setLastAskedDiff(diffInMinutes); + }, [lastAsked]); + + const title = !isError && healthOk ? 'All Systems Operational' : 'Service disruption'; + const icon = !isError && healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + + const problematicServices = Object.values(healthResponse?.healthMap || {}).filter( + (service: Record) => !service.healthOk + ) + + return ( +
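// Note: HealthStatus above re-queries the health endpoint only when the cached
// response is empty or the last check is older than ten minutes. A standalone
// sketch of that staleness test, assuming lastAsked stores epoch milliseconds
// as a string (as in the code above); the helper name is illustrative:
const STALE_AFTER_MINUTES = 10;

function isHealthStale(lastAsked: string | null, hasCachedResponse: boolean): boolean {
  if (!hasCachedResponse || !lastAsked) return true;
  const diffInMinutes = Math.round((Date.now() - parseInt(lastAsked, 10)) / 1000 / 60);
  return diffInMinutes > STALE_AFTER_MINUTES;
}

// Usage, mirroring the mount effect:
// if (isHealthStale(lastAsked, Object.keys(healthResponse).length > 0)) void getHealth();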
+
+
+ + {title} +
+
+ Last checked {lastAskedDiff} mins. ago +
getHealth()} + > + +
+
+ {isError &&
Error getting service health status
} + +
+ {!isError && !healthOk ? ( + <> +
+
+ Observed installation issues with the following
+
+ {problematicServices.map((service) => ( + + setShowModal(true)} + healthOk={false} + name={service.name} + isSelectable + noBorder={problematicServices.length === 1} + /> + + ))} + + ) : null} +
+
+
+ ); +} + +export default HealthWidget \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx new file mode 100644 index 000000000..4a9e1bc15 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx @@ -0,0 +1,52 @@ +import { Icon } from 'UI'; +import React from 'react'; +import cn from 'classnames'; +import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; + +function Category({ + name, + healthOk, + onClick, + isSelectable, + isExpandable, + isExpanded, + isSelected, + isLoading, + noBorder, +}: { + name: string; + healthOk?: boolean; + isLoading?: boolean; + onClick: (args: any) => void; + isSelectable?: boolean; + isExpandable?: boolean; + isExpanded?: boolean; + isSelected?: boolean; + noBorder?: boolean; +}) { + + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + return ( +
+ {isLoading ? ( + + ) : } + {name} + + {isSelectable ? : null} + {isExpandable ? ( + + ) : null} +
+ ); +} + +export default Category \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx new file mode 100644 index 000000000..c3742ece6 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx @@ -0,0 +1,48 @@ +import React from 'react'; +import Category from 'Components/Header/HealthStatus/ServiceCategory'; +import cn from 'classnames'; + +function SubserviceHealth({ + subservice, + name, +}: { + name: string; + subservice: { health: boolean; details: { errors?: string[]; version?: string } }; +}) { + const [isExpanded, setIsExpanded] = React.useState(!subservice?.health); + + const isExpandable = subservice?.details && Object.keys(subservice?.details).length > 0; + return ( +
+ (isExpandable ? setIsExpanded(!isExpanded) : null)} + name={name} + healthOk={subservice?.health} + isExpandable={isExpandable} + isExpanded={isExpanded} + /> + {isExpanded ? ( +
+ {subservice?.details?.version ? ( +
+
Version
+
+ {subservice?.details?.version} +
+
+ ) : null} + {subservice?.details?.errors?.length ? ( +
+
Error log:
+ {subservice.details.errors.toString()} +
+ ) : subservice?.health ? null : ( + 'Service not responding' + )} +
+ ) : null} +
+ ); +} + +export default SubserviceHealth; diff --git a/frontend/app/components/Header/HealthStatus/const.ts b/frontend/app/components/Header/HealthStatus/const.ts new file mode 100644 index 000000000..69b5b1c5e --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/const.ts @@ -0,0 +1,9 @@ +export const categoryKeyNames = { + backendServices: 'Backend Services', + databases: 'Databases', + ingestionPipeline: 'Ingestion Pipeline', + ssl: 'SSL', +} as const + +export const lastAskedKey = '__openreplay_health_status'; +export const healthResponseKey = '__openreplay_health_response'; \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/getHealth.ts b/frontend/app/components/Header/HealthStatus/getHealth.ts new file mode 100644 index 000000000..70bd8914c --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/getHealth.ts @@ -0,0 +1,36 @@ +import { healthService } from 'App/services'; +import { categoryKeyNames, lastAskedKey, healthResponseKey } from "Components/Header/HealthStatus/const"; +import { IServiceStats } from "Components/Header/HealthStatus/HealthStatus"; + + +function mapResponse(resp: Record) { + const services = Object.keys(resp); + const healthMap: Record = {}; + services.forEach((service) => { + healthMap[service] = { + // @ts-ignore + name: categoryKeyNames[service], + healthOk: true, + subservices: resp[service], + serviceName: service, + }; + Object.values(healthMap[service].subservices).forEach((subservice: Record) => { + if (!subservice?.health) healthMap[service].healthOk = false; + }); + }); + + const overallHealth = Object.values(healthMap).every( + (service: Record) => service.healthOk + ); + + return { overallHealth, healthMap }; +} + +export async function getHealthRequest() { + const r = await healthService.fetchStatus(); + const healthMap = mapResponse(r); + const asked = new Date().getTime(); + localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)); + localStorage.setItem(lastAskedKey, asked.toString()); + return { healthMap, asked } +} \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/index.ts b/frontend/app/components/Header/HealthStatus/index.ts new file mode 100644 index 000000000..1f4ce8576 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/index.ts @@ -0,0 +1 @@ +export { default } from './HealthStatus' \ No newline at end of file diff --git a/frontend/app/components/Header/PreferencesView/PreferencesView.tsx b/frontend/app/components/Header/PreferencesView/PreferencesView.tsx index f9da1b933..735fdeaa9 100644 --- a/frontend/app/components/Header/PreferencesView/PreferencesView.tsx +++ b/frontend/app/components/Header/PreferencesView/PreferencesView.tsx @@ -18,7 +18,7 @@ function PreferencesView(props: Props) {
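// Note: mapResponse in getHealth.ts above folds the raw service-to-subservice
// payload into per-category healthOk flags plus an overallHealth conjunction.
// An assumed example payload and its mapped result, for reference; the shapes
// are inferred from the code and the concrete services are illustrative:
const exampleResponse = {
  databases: {
    postgres: { health: true, details: { version: '14.5' } },
    redis: { health: false, details: { errors: ['connection refused'] } },
  },
};

// mapResponse(exampleResponse) would yield roughly:
// {
//   overallHealth: false, // the redis subservice is unhealthy
//   healthMap: {
//     databases: {
//       name: 'Databases', // looked up via categoryKeyNames
//       serviceName: 'databases',
//       healthOk: false,
//       subservices: { postgres: { ... }, redis: { ... } },
//     },
//   },
// }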
- Changes applied at organization level
+ Updates are applied at the organization level.
); diff --git a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx index 2faf5e128..2b8963295 100644 --- a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx +++ b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx @@ -35,8 +35,8 @@ function SettingsMenu(props: RouteComponentProps) { }; return (
{isAdmin && ( <> diff --git a/frontend/app/components/Header/UserMenu/UserMenu.tsx b/frontend/app/components/Header/UserMenu/UserMenu.tsx index c54d582b0..ddc3cc2f1 100644 --- a/frontend/app/components/Header/UserMenu/UserMenu.tsx +++ b/frontend/app/components/Header/UserMenu/UserMenu.tsx @@ -24,7 +24,7 @@ function UserMenu(props: RouteComponentProps) { return (
diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index 05702a2fe..e4dc763b3 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -36,9 +36,22 @@ class Login extends React.Component { CAPTCHA_ENABLED: window.env.CAPTCHA_ENABLED === 'true', }; + static getDerivedStateFromProps(nextProps, prevState) { + const { authDetails } = nextProps; + if (Object.keys(authDetails).length === 0) { + return null; + } + + if (!authDetails.tenants) { + nextProps.history.push(SIGNUP_ROUTE); + } + + return null; + } + componentDidMount() { const { params } = this.props; - this.props.fetchTenants(); + this.props.fetchTenants() const jwt = params.get('jwt'); if (jwt) { this.props.setJwt(jwt); @@ -69,18 +82,16 @@ class Login extends React.Component { const { CAPTCHA_ENABLED } = this.state; return ( -
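// Note: this redirect is symmetric with the Signup change later in this patch:
// Login pushes to SIGNUP_ROUTE when no tenant exists yet, while Signup pushes
// to LOGIN_ROUTE once one does. A condensed sketch of the pair, with the route
// constants as imported in each file:
//
// Login.js:  if (!authDetails.tenants) nextProps.history.push(SIGNUP_ROUTE);
// Signup.js: if (authDetails.tenants)  nextProps.history.push(LOGIN_ROUTE);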
-
-
- +
+
+
+
- -
-
-
+
-
-

Login to OpenReplay

+

Login to your account

+
+ {!authDetails.tenants && (
Don't have an account?{' '} @@ -99,22 +110,23 @@ class Login extends React.Component { onChange={(token) => this.handleSubmit(token)} /> )} -
-
- +
+ + -
-
+ + -
+
{errors && errors.length ? ( @@ -142,23 +154,22 @@ class Login extends React.Component { ))}
) : null} - {/* + +
diff --git a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js index 7bf9ef5bb..837281e8f 100644 --- a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js +++ b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js @@ -56,7 +56,7 @@ const ProjectCodeSnippet = props => { r.issue=function(k,p){r.push([6,k,p])}; r.isActive=function(){return false}; r.getSessionToken=function(){}; - })("//static.openreplay.com/${window.env.TRACKER_VERSION}/openreplay.js",1,0,initOpts,startOpts); + })("${window.env.TRACKER_HOST || '//static.openreplay.com'}/${window.env.TRACKER_VERSION}/openreplay.js",1,0,initOpts,startOpts); `; const saveGDPR = (value) => { diff --git a/frontend/app/components/Overview/Overview.tsx b/frontend/app/components/Overview/Overview.tsx index 78b4bfe2b..1ef003f7a 100644 --- a/frontend/app/components/Overview/Overview.tsx +++ b/frontend/app/components/Overview/Overview.tsx @@ -4,25 +4,29 @@ import NoSessionsMessage from 'Shared/NoSessionsMessage'; import MainSearchBar from 'Shared/MainSearchBar'; import SessionSearch from 'Shared/SessionSearch'; import SessionListContainer from 'Shared/SessionListContainer/SessionListContainer'; +import cn from 'classnames'; +import OverviewMenu from 'Shared/OverviewMenu'; function Overview() { - return ( -
-
-
- + return ( +
+
+ +
+
+
+ + + -
- - - -
- -
-
-
+
+
- ); +
+
+ ); } export default withPageTitle('Sessions - OpenReplay')(Overview); diff --git a/frontend/app/components/Session/LivePlayer.tsx b/frontend/app/components/Session/LivePlayer.tsx index cace337e8..f558c3103 100644 --- a/frontend/app/components/Session/LivePlayer.tsx +++ b/frontend/app/components/Session/LivePlayer.tsx @@ -1,7 +1,6 @@ import React from 'react'; import { useEffect, useState } from 'react'; import { connect } from 'react-redux'; -import withRequest from 'HOCs/withRequest'; import withPermissions from 'HOCs/withPermissions'; import { PlayerContext, defaultContextValue, ILivePlayerContext } from './playerContext'; import { makeAutoObservable } from 'mobx'; @@ -11,6 +10,7 @@ import PlayerBlock from './Player/LivePlayer/LivePlayerBlock'; import styles from '../Session_/session.module.css'; import Session from 'App/mstore/types/session'; import withLocationHandlers from 'HOCs/withLocationHandlers'; +import APIClient from 'App/api_client'; interface Props { session: Session; @@ -28,14 +28,12 @@ interface Props { function LivePlayer({ session, loadingCredentials, - assistCredentials, - request, - isEnterprise, userEmail, userName, isMultiview, customSession, - query + query, + isEnterprise }: Props) { // @ts-ignore const [contextValue, setContextValue] = useState(defaultContextValue); @@ -52,13 +50,21 @@ function LivePlayer({ name: userName, }, }; - const [player, store] = createLiveWebPlayer(sessionWithAgentData, assistCredentials, (state) => - makeAutoObservable(state) - ); - setContextValue({ player, store }); - - return () => player.clean(); - }, [session.sessionId, assistCredentials]); + if (isEnterprise) { + new APIClient().get('/config/assist/credentials').then(r => r.json()) + .then(({ data }) => { + const [player, store] = createLiveWebPlayer(sessionWithAgentData, data, (state) => + makeAutoObservable(state) + ); + setContextValue({ player, store }); + }) + } else { + const [player, store] = createLiveWebPlayer(sessionWithAgentData, null, (state) => + makeAutoObservable(state) + ); + setContextValue({ player, store }); + } + }, [session.sessionId]); // LAYOUT (TODO: local layout state - useContext or something..) 
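// Note: the credentials effect above replaces the withRequest HOC; enterprise
// builds now fetch assist credentials inline before constructing the live
// player. A minimal sketch of that flow, using the endpoint and calls shown
// above; the async wrapper and its name are assumed additions for clarity:
//
// async function buildLivePlayer(isEnterprise: boolean, session: Session) {
//   let credentials = null;
//   if (isEnterprise) {
//     const r = await new APIClient().get('/config/assist/credentials');
//     credentials = (await r.json()).data;
//   }
//   return createLiveWebPlayer(session, credentials, (state) => makeAutoObservable(state));
// }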
useEffect(() => { @@ -70,8 +76,10 @@ function LivePlayer({ setFullView(true); } - if (isEnterprise) { - request(); + return () => { + contextValue.player?.clean?.(); + // @ts-ignore default empty + setContextValue(defaultContextValue) } }, []); @@ -98,13 +106,7 @@ function LivePlayer({ ); } -export default withRequest({ - initialData: null, - endpoint: '/assist/credentials', - dataName: 'assistCredentials', - loadingName: 'loadingCredentials', -})( - withPermissions( +export default withPermissions( ['ASSIST_LIVE'], '', true @@ -121,4 +123,3 @@ export default withRequest({ } )(withLocationHandlers()(LivePlayer)) ) -); diff --git a/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx b/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx index 886bb848c..1855a5046 100644 --- a/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx +++ b/frontend/app/components/Session/Player/ClickMapRenderer/ThinPlayer.tsx @@ -25,7 +25,11 @@ function WebPlayer(props: any) { ); setContextValue({ player: WebPlayerInst, store: PlayerStore }); - return () => WebPlayerInst.clean(); + return () => { + WebPlayerInst.clean(); + // @ts-ignore + setContextValue(defaultContextValue); + } }, [session.sessionId]); const isPlayerReady = contextValue.store?.get().ready diff --git a/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx b/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx index c17007648..148b203c8 100644 --- a/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx +++ b/frontend/app/components/Session/Player/LivePlayer/LivePlayerInst.tsx @@ -29,7 +29,6 @@ function Player(props: IProps) { const screenWrapper = React.useRef(null); const ready = playerContext.store.get().ready - console.log(ready) React.useEffect(() => { if (!props.closedLive || isMultiview) { const parentElement = findDOMNode(screenWrapper.current) as HTMLDivElement | null; //TODO: good architecture diff --git a/frontend/app/components/Session/Player/ReplayPlayer/EventsBlock/UserCard/UserCard.js b/frontend/app/components/Session/Player/ReplayPlayer/EventsBlock/UserCard/UserCard.js index e6893ca3e..ef80fd0bf 100644 --- a/frontend/app/components/Session/Player/ReplayPlayer/EventsBlock/UserCard/UserCard.js +++ b/frontend/app/components/Session/Player/ReplayPlayer/EventsBlock/UserCard/UserCard.js @@ -79,7 +79,7 @@ function UserCard({ className, request, session, width, height, similarSessions, render={() => (
} + comp={} label={countries[userCountry]} value={{formatTimeOrDate(startedAt)}} /> diff --git a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx index 162f92237..7fc8eabe3 100644 --- a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx +++ b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx @@ -24,7 +24,8 @@ function PlayerBlockHeader(props: any) { const [hideBack, setHideBack] = React.useState(false); const { player, store } = React.useContext(PlayerContext); - const { width, height, showEvents } = store.get(); + const playerState = store?.get?.() || { width: 0, height: 0, showEvents: false } + const { width = 0, height = 0, showEvents = false } = playerState const { session, diff --git a/frontend/app/components/Session/Session.js b/frontend/app/components/Session/Session.js index bece203c8..75a96433b 100644 --- a/frontend/app/components/Session/Session.js +++ b/frontend/app/components/Session/Session.js @@ -2,7 +2,7 @@ import React from 'react'; import { useEffect, useState } from 'react'; import { connect } from 'react-redux'; import usePageTitle from 'App/hooks/usePageTitle'; -import { fetch as fetchSession, clearCurrentSession } from 'Duck/sessions'; +import { fetchV2, clearCurrentSession } from "Duck/sessions"; import { fetchList as fetchSlackList } from 'Duck/integrations/slack'; import { Link, NoContent, Loader } from 'UI'; import { sessions as sessionsRoute } from 'App/routes'; @@ -17,7 +17,7 @@ function Session({ sessionId, loading, hasErrors, - fetchSession, + fetchV2, clearCurrentSession, }) { usePageTitle("OpenReplay Session Player"); @@ -25,7 +25,7 @@ function Session({ const { sessionStore } = useStore(); useEffect(() => { if (sessionId != null) { - fetchSession(sessionId) + fetchV2(sessionId) } else { console.error("No sessionID in route.") } @@ -67,7 +67,7 @@ export default withPermissions(['SESSION_REPLAY'], '', true)(connect((state, pro session: state.getIn([ 'sessions', 'current' ]), }; }, { - fetchSession, fetchSlackList, + fetchV2, clearCurrentSession, })(Session)); diff --git a/frontend/app/components/Session/WebPlayer.tsx b/frontend/app/components/Session/WebPlayer.tsx index 9ccef8807..07fab2877 100644 --- a/frontend/app/components/Session/WebPlayer.tsx +++ b/frontend/app/components/Session/WebPlayer.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useState } from 'react'; import { connect } from 'react-redux'; -import { Modal } from 'UI'; +import { Modal, Loader } from 'UI'; import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player'; import { fetchList } from 'Duck/integrations'; import { createWebPlayer } from 'Player'; @@ -68,6 +68,12 @@ function WebPlayer(props: any) { } }, [session.sessionId]); + React.useEffect(() => { + if (session.events.length > 0 || session.errors.length > 0) { + contextValue.player?.updateLists?.(session) + } + }, [session.events, session.errors, contextValue.player]) + const isPlayerReady = contextValue.store?.get().ready React.useEffect(() => { @@ -94,7 +100,7 @@ function WebPlayer(props: any) { contextValue.player.play(); }; - if (!contextValue.player || !session) return null; + if (!session) return ; return ( @@ -106,12 +112,12 @@ function WebPlayer(props: any) { fullscreen={fullscreen} /> {/* @ts-ignore */} - + /> : } {showNoteModal ? ( (
{envTag}
-
- +
{/* @ts-ignore */} {envObject[envTag]} -
))} diff --git a/frontend/app/components/Session_/BugReport/components/ReportTitle.tsx b/frontend/app/components/Session_/BugReport/components/ReportTitle.tsx index 93eb4148a..bb6bb327c 100644 --- a/frontend/app/components/Session_/BugReport/components/ReportTitle.tsx +++ b/frontend/app/components/Session_/BugReport/components/ReportTitle.tsx @@ -45,7 +45,7 @@ function ReportTitle() { /> ) : ( // @ts-ignore - +
1000 + } + return false +} + @withOverlay() export default class Event extends React.PureComponent { state = { @@ -44,35 +54,50 @@ export default class Event extends React.PureComponent { const { event } = this.props; let title = event.type; let body; + let icon; + const isFrustration = isFrustrationEvent(event); + const tooltip = { disabled: true, text: '' } + switch (event.type) { case TYPES.LOCATION: title = 'Visited'; body = event.url; + icon = 'location'; break; case TYPES.CLICK: title = 'Clicked'; body = event.label; + icon = isFrustration ? 'click_hesitation' : 'click'; + isFrustration ? Object.assign(tooltip, { disabled: false, text: `User hesitated to click for ${Math.round(event.hesitation/1000)}s`, }) : null; break; case TYPES.INPUT: title = 'Input'; body = event.value; + icon = isFrustration ? 'input_hesitation' : 'input'; + isFrustration ? Object.assign(tooltip, { disabled: false, text: `User hesitated to enter a value for ${Math.round(event.hesitation/1000)}s`, }) : null; break; case TYPES.CLICKRAGE: title = `${ event.count } Clicks`; body = event.label; + icon = 'clickrage' break; case TYPES.IOS_VIEW: title = 'View'; body = event.name; + icon = 'ios_view' + break; + case 'mouse_thrashing': + title = 'Mouse Thrashing'; + icon = 'mouse_thrashing' break; } const isLocation = event.type === TYPES.LOCATION; - const isClickrage = event.type === TYPES.CLICKRAGE; return ( +
- { event.type && } + { event.type && }
@@ -100,6 +125,7 @@ export default class Event extends React.PureComponent {
}
+ ); }; @@ -110,17 +136,15 @@ export default class Event extends React.PureComponent { isCurrent, onClick, showSelection, - onCheckboxClick, showLoadInfo, toggleLoadInfo, isRed, - extended, - highlight = false, presentInSearch = false, - isLastInGroup, whiteBg, } = this.props; const { menuOpen } = this.state; + + const isFrustration = isFrustrationEvent(event); return (
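// Note: the switch above maps each event type to a title and icon, swapping in
// hesitation variants for CLICK and INPUT when isFrustrationEvent fires. An
// equivalent table-driven sketch, illustrative only, with icon names taken
// from the patch:
//
// const EVENT_PRESENTATION = {
//   [TYPES.LOCATION]: { title: 'Visited', icon: 'location' },
//   [TYPES.CLICK]: { title: 'Clicked', icon: 'click' }, // 'click_hesitation' when frustrated
//   [TYPES.INPUT]: { title: 'Input', icon: 'input' },   // 'input_hesitation' when frustrated
//   [TYPES.CLICKRAGE]: { title: 'Clicks', icon: 'clickrage' },
//   [TYPES.IOS_VIEW]: { title: 'View', icon: 'ios_view' },
//   mouse_thrashing: { title: 'Mouse Thrashing', icon: 'mouse_thrashing' },
// };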
{ this.wrapper = ref } } @@ -135,7 +159,7 @@ export default class Event extends React.PureComponent { [ cls.red ]: isRed, [ cls.clickType ]: event.type === TYPES.CLICK, [ cls.inputType ]: event.type === TYPES.INPUT, - [ cls.clickrageType ]: event.type === TYPES.CLICKRAGE, + [ cls.frustration ]: isFrustration, [ cls.highlight ] : presentInSearch, [ cls.lastInGroup ]: whiteBg, }) } @@ -146,13 +170,10 @@ export default class Event extends React.PureComponent { { event.target ? 'Copy CSS' : 'Copy URL' } } -
-
+
+
{ this.renderBody() }
- {/* { event.type === TYPES.LOCATION && -
{event.url}
- } */}
{ event.type === TYPES.LOCATION && (event.fcpTime || event.visuallyComplete || event.timeToInteractive) && [] }) { performanceChartData, stackList: stackEventList, eventList: eventsList, + frustrationsList, exceptionsList, resourceList: resourceListUnmap, fetchList, @@ -46,8 +46,8 @@ function OverviewPanel({ issuesList }: { issuesList: Record[] }) { NETWORK: resourceList, ERRORS: exceptionsList, EVENTS: stackEventList, - CLICKRAGE: eventsList.filter((item: any) => item.type === TYPES.CLICKRAGE), PERFORMANCE: performanceChartData, + FRUSTRATIONS: frustrationsList, }; }, [dataLoaded]); diff --git a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx index 8d76a3070..3a841d97c 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx @@ -4,15 +4,15 @@ import { Checkbox, Tooltip } from 'UI'; const NETWORK = 'NETWORK'; const ERRORS = 'ERRORS'; const EVENTS = 'EVENTS'; -const CLICKRAGE = 'CLICKRAGE'; +const FRUSTRATIONS = 'FRUSTRATIONS'; const PERFORMANCE = 'PERFORMANCE'; export const HELP_MESSAGE: any = { NETWORK: 'Network requests made in this session', EVENTS: 'Visualizes the events that takes place in the DOM', ERRORS: 'Visualizes native JS errors like Type, URI, Syntax etc.', - CLICKRAGE: 'Indicates user frustration when repeated clicks are recorded', PERFORMANCE: 'Summary of this session’s memory, and CPU consumption on the timeline', + FRUSTRATIONS: 'Indicates user frustrations in the session', }; interface Props { @@ -21,7 +21,7 @@ interface Props { } function FeatureSelection(props: Props) { const { list } = props; - const features = [NETWORK, ERRORS, EVENTS, CLICKRAGE, PERFORMANCE]; + const features = [NETWORK, ERRORS, EVENTS, PERFORMANCE, FRUSTRATIONS]; const disabled = list.length >= 5; return ( @@ -30,7 +30,7 @@ function FeatureSelection(props: Props) { const checked = list.includes(feature); const _disabled = disabled && !checked; return ( - + { ); }; - const renderClickRageElement = (item: any) => { + const renderFrustrationElement = (item: any) => { + const elData = { name: '', icon: ''} + if (item.type === TYPES.CLICK) Object.assign(elData, { name: `User hesitated to click for ${Math.round(item.hesitation/1000)}s`, icon: 'click-hesitation' }) + if (item.type === TYPES.INPUT) Object.assign(elData, { name: `User hesitated to enter a value for ${Math.round(item.hesitation/1000)}s`, icon: 'input-hesitation' }) + if (item.type === TYPES.CLICKRAGE) Object.assign(elData, { name: 'Click Rage', icon: 'click-rage' }) + if (item.type === issueTypes.MOUSE_THRASHING) Object.assign(elData, { name: 'Mouse Thrashing', icon: 'cursor-trash' }) + return ( - {'Click Rage'} + {elData.name}
} delay={0} placement="top" >
- +
); @@ -158,8 +166,8 @@ const TimelinePointer = React.memo((props: Props) => { if (type === 'NETWORK') { return renderNetworkElement(pointer); } - if (type === 'CLICKRAGE') { - return renderClickRageElement(pointer); + if (type === 'FRUSTRATIONS') { + return renderFrustrationElement(pointer); } if (type === 'ERRORS') { return renderExceptionElement(pointer); diff --git a/frontend/app/components/Session_/Player/Controls/Time.js b/frontend/app/components/Session_/Player/Controls/Time.js index 85291009f..e55f0b731 100644 --- a/frontend/app/components/Session_/Player/Controls/Time.js +++ b/frontend/app/components/Session_/Player/Controls/Time.js @@ -5,7 +5,7 @@ import { PlayTime } from 'App/player-ui' const ReduxTime = observer(({ format, name, isCustom }) => { const { store } = React.useContext(PlayerContext) - const time = store.get()[name] + const time = store.get()[name] || 0 return }) diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.tsx b/frontend/app/components/Session_/Player/Controls/Timeline.tsx index 7cde52a96..a69189b49 100644 --- a/frontend/app/components/Session_/Player/Controls/Timeline.tsx +++ b/frontend/app/components/Session_/Player/Controls/Timeline.tsx @@ -81,17 +81,23 @@ function Timeline(props: IProps) { }; const showTimeTooltip = (e: React.MouseEvent) => { - if (e.target !== progressRef.current && e.target !== timelineRef.current) { + if ( + e.target !== progressRef.current + && e.target !== timelineRef.current + // @ts-ignore black magic + && !progressRef.current.contains(e.target) + ) { return props.tooltipVisible && hideTimeTooltip(); } const time = getTime(e); + if (!time) return; const tz = settingsStore.sessionSettings.timezone.value const timeStr = DateTime.fromMillis(props.startedAt + time).setZone(tz).toFormat(`hh:mm:ss a`) const timeLineTooltip = { time: Duration.fromMillis(time).toFormat(`mm:ss`), timeStr, - offset: e.nativeEvent.offsetX, + offset: e.nativeEvent.pageX, isVisible: true, }; diff --git a/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx index e47593b97..5f746f336 100644 --- a/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx +++ b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx @@ -22,7 +22,7 @@ function TimeTooltip({ className={stl.timeTooltip} style={{ top: 0, - left: offset, + left: `calc(${offset}px - 0.5rem)`, display: isVisible ? 
'block' : 'none', transform: 'translate(-50%, -110%)', whiteSpace: 'nowrap', diff --git a/frontend/app/components/Session_/ScreenRecorder/ScreenRecorder.tsx b/frontend/app/components/Session_/ScreenRecorder/ScreenRecorder.tsx index 4f7d9f0bf..97c3db78d 100644 --- a/frontend/app/components/Session_/ScreenRecorder/ScreenRecorder.tsx +++ b/frontend/app/components/Session_/ScreenRecorder/ScreenRecorder.tsx @@ -100,7 +100,8 @@ function ScreenRecorder({ }; const recordingRequest = () => { - player.assistManager.requestRecording(); + const onDeny = () => toast.info('Recording request was rejected by user') + player.assistManager.requestRecording({ onDeny }); }; if (!isSupported() || !isEnterprise) { diff --git a/frontend/app/components/Session_/Subheader.js b/frontend/app/components/Session_/Subheader.js index 8a8d4d2f6..8c87cd184 100644 --- a/frontend/app/components/Session_/Subheader.js +++ b/frontend/app/components/Session_/Subheader.js @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useMemo } from 'react'; import { Icon, Tooltip, Button } from 'UI'; import QueueControls from './QueueControls'; import Bookmark from 'Shared/Bookmark'; @@ -33,6 +33,15 @@ function SubHeader(props) { endTime, } = store.get(); + const enabledIntegration = useMemo(() => { + const { integrations } = props; + if (!integrations || !integrations.size) { + return false; + } + + return integrations.some((i) => i.token); + }) + const mappedResourceList = resourceList .filter((r) => r.isRed || r.isYellow) .concat(fetchList.filter((i) => parseInt(i.status) >= 400)) @@ -120,7 +129,7 @@ function SubHeader(props) { Create Bug Report - + {enabledIntegration && } ({ siteId: state.getIn(['site', 'siteId']) }))(observer(SubHeader)); +export default connect((state) => ({ + siteId: state.getIn(['site', 'siteId']), + integrations: state.getIn([ 'issues', 'list' ]) +}))(observer(SubHeader)); diff --git a/frontend/app/components/Signup/Signup.js b/frontend/app/components/Signup/Signup.js index 83a658ec1..fc2dfcec4 100644 --- a/frontend/app/components/Signup/Signup.js +++ b/frontend/app/components/Signup/Signup.js @@ -2,11 +2,17 @@ import React from 'react'; import withPageTitle from 'HOCs/withPageTitle'; import { Icon } from 'UI'; -import stl from './signup.module.css'; +import { connect } from 'react-redux'; import cn from 'classnames'; import SignupForm from './SignupForm'; import RegisterBg from '../../svg/register.svg'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import { getHealthRequest } from 'Components/Header/HealthStatus/getHealth'; +import { login } from 'App/routes'; +import { withRouter } from 'react-router-dom'; +import { fetchTenants } from 'Duck/user'; +const LOGIN_ROUTE = login(); const BulletItem = ({ text }) => (
@@ -15,28 +21,84 @@ const BulletItem = ({ text }) => (
{text}
);
+
+const healthStatusCheck_key = '__or__healthStatusCheck_key'
+
+@connect(
+  (state, props) => ({
+    loading: state.getIn(['user', 'loginRequest', 'loading']),
+    authDetails: state.getIn(['user', 'authDetails']),
+  }), { fetchTenants }
+)
 @withPageTitle('Signup - OpenReplay')
+@withRouter
 export default class Signup extends React.Component {
+  state = {
+    healthModalPassed: localStorage.getItem(healthStatusCheck_key) === 'true',
+    healthStatusLoading: true,
+    healthStatus: null,
+  }
+
+  static getDerivedStateFromProps(nextProps, prevState) {
+    const { authDetails } = nextProps;
+    if (Object.keys(authDetails).length === 0) {
+      return null;
+    }
+
+    if (authDetails.tenants) {
+      nextProps.history.push(LOGIN_ROUTE);
+    }
+
+    return null;
+  }
+
+  getHealth = async () => {
+    this.setState({ healthStatusLoading: true });
+    const { healthMap } = await getHealthRequest();
+    this.setState({ healthStatus: healthMap, healthStatusLoading: false });
+  }
+
+  componentDidMount() {
+    if (!this.state.healthModalPassed) void this.getHealth();
+
+    const { authDetails } = this.props;
+    if (Object.keys(authDetails).length === 0) {
+      this.props.fetchTenants();
+    }
+  }
+
+  setHealthModalPassed = () => {
+    localStorage.setItem(healthStatusCheck_key, 'true');
+    this.setState({ healthModalPassed: true });
+  }
+
   render() {
-    return (
-
-
-
- -
- null} + healthResponse={this.state.healthStatus} + getHealth={this.getHealth} + isLoading={this.state.healthStatusLoading} + setPassed={this.setHealthModalPassed} /> -
+ ) + } + + return ( +
+ {/*
+
-
- OpenReplay Cloud{' '} -
- +
+
+ +
{' '} +
+ Cloud
-
OpenReplay Cloud is the hosted version of our open-source project.
+
OpenReplay Cloud is the hosted version of our open-source project.
We’ll manage hosting, scaling and upgrades.
@@ -46,8 +108,8 @@ export default class Signup extends React.Component {
-
-
+
*/} +
diff --git a/frontend/app/components/Signup/SignupForm/SignupForm.js b/frontend/app/components/Signup/SignupForm/SignupForm.js index f110d2dae..b8242cc84 100644 --- a/frontend/app/components/Signup/SignupForm/SignupForm.js +++ b/frontend/app/components/Signup/SignupForm/SignupForm.js @@ -1,26 +1,25 @@ -import React from 'react' -import { Form, Input, Icon, Button, Link } from 'UI' -import { login } from 'App/routes' -import ReCAPTCHA from 'react-google-recaptcha' -import stl from './signup.module.css' +import React from 'react'; +import { Form, Input, Icon, Button, Link } from 'UI'; +import { login } from 'App/routes'; +import ReCAPTCHA from 'react-google-recaptcha'; +import stl from './signup.module.css'; import { signup } from 'Duck/user'; -import { connect } from 'react-redux' -import Select from 'Shared/Select' +import { connect } from 'react-redux'; +import Select from 'Shared/Select'; import { SITE_ID_STORAGE_KEY } from 'App/constants/storageKeys'; -const LOGIN_ROUTE = login() -const recaptchaRef = React.createRef() +const LOGIN_ROUTE = login(); +const recaptchaRef = React.createRef(); @connect( - state => ({ + (state) => ({ tenants: state.getIn(['user', 'tenants']), - errors: state.getIn([ 'user', 'signupRequest', 'errors' ]), - loading: state.getIn([ 'user', 'signupRequest', 'loading' ]), + errors: state.getIn(['user', 'signupRequest', 'errors']), + loading: state.getIn(['user', 'signupRequest', 'loading']), }), - { signup }, + { signup } ) export default class SignupForm extends React.Component { - state = { tenantId: '', fullname: '', @@ -36,21 +35,30 @@ export default class SignupForm extends React.Component { if (props.errors && props.errors.size > 0 && state.reload) { recaptchaRef.current.reset(); return { - reload: false - } - } + reload: false, + }; + } return null; } handleSubmit = (token) => { const { tenantId, fullname, password, email, projectName, organizationName, auth } = this.state; - localStorage.removeItem(SITE_ID_STORAGE_KEY) - this.props.signup({ tenantId, fullname, password, email, projectName, organizationName, auth, 'g-recaptcha-response': token }) - this.setState({ reload: true }) - } + localStorage.removeItem(SITE_ID_STORAGE_KEY); + this.props.signup({ + tenantId, + fullname, + password, + email, + projectName, + organizationName, + auth, + 'g-recaptcha-response': token, + }); + this.setState({ reload: true }); + }; - write = ({ target: { value, name } }) => this.setState({ [ name ]: value }) - writeOption = ({ name, value }) => this.setState({ [ name ]: value.value }); + write = ({ target: { value, name } }) => this.setState({ [name]: value }); + writeOption = ({ name, value }) => this.setState({ [name]: value.value }); onSubmit = (e) => { e.preventDefault(); @@ -60,112 +68,134 @@ export default class SignupForm extends React.Component { } else if (!CAPTCHA_ENABLED) { this.handleSubmit(); } - } + }; render() { const { loading, errors, tenants } = this.props; const { CAPTCHA_ENABLED } = this.state; return ( -
-
-

Get Started

-
Already having an account? Sign in
+
+
+
- <> - { CAPTCHA_ENABLED && ( - this.handleSubmit(token) } - /> - )} -
- { tenants.length > 0 && ( + +
+

+ Create Account +

+
+ <> + {CAPTCHA_ENABLED && ( + this.handleSubmit(token)} + /> + )} +
- - { errors && -
- { errors.map(error => ( - + + {errors && ( +
+ {errors.map((error) => ( +
+ + + {error} +
+
+
+ ))} +
+ )} + + +
+ Already have an account?{' '}
+
+ Login
+
- } -
- -
- - ) +
+ ); } -} \ No newline at end of file +} diff --git a/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx b/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx index 116392534..5fa0fdd96 100644 --- a/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx +++ b/frontend/app/components/shared/CodeSnippet/CodeSnippet.tsx @@ -44,7 +44,7 @@ function CodeSnippet(props: Props) { r.issue=function(k,p){r.push([6,k,p])}; r.isActive=function(){return false}; r.getSessionToken=function(){}; - })("//static.openreplay.com/${window.env.TRACKER_VERSION}/openreplay.js",1,0,initOpts,startOpts); + })("${window.env.TRACKER_HOST || '//static.openreplay.com'}/${window.env.TRACKER_VERSION}/openreplay.js",1,0,initOpts,startOpts); `; return ( diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index b7207ddef..9f7e9470a 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -128,6 +128,21 @@ export function renderDuration(r: any) { ); } +function renderStatus({ status, cached }: { status: string, cached: boolean }) { + return ( + <> + {cached ? ( + +
+ {status} + +
+
+ ) : status} + + ) +} + function NetworkPanel({ startedAt }: { startedAt: number }) { const { player, store } = React.useContext(PlayerContext) @@ -348,7 +363,8 @@ function NetworkPanel({ startedAt }: { startedAt: number }) { { label: 'Status', dataKey: 'status', - width: 70, + width: 90, + render: renderStatus, }, { label: 'Type', diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index 2a3183270..cae3bb2a5 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -87,7 +87,7 @@ function LiveSessionList(props: Props) {
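// Note: the Status column in NetworkPanel above now uses a custom cell
// renderer to badge cached responses. A sketch of the column contract assumed
// by that change; the row shape is inferred from renderStatus:
//
// const statusColumn = {
//   label: 'Status',
//   dataKey: 'status',
//   width: 90,
//   render: renderStatus, // receives { status, cached } from the row
// };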
-

+

Live Sessions {/* {numberWithCommas(total)} */}

diff --git a/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx new file mode 100644 index 000000000..9736c353f --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx @@ -0,0 +1,52 @@ +import React from 'react'; +import { SideMenuitem } from 'UI'; +import { connect } from 'react-redux'; +import { setActiveTab } from 'Duck/search'; + +interface Props { + setActiveTab: (tab: any) => void; + activeTab: string; + isEnterprise: boolean; +} +function OverviewMenu(props: Props) { + const { activeTab, isEnterprise } = props; + + return ( +
+
+ props.setActiveTab({ type: 'all' })} + /> +
+
+
+ props.setActiveTab({ type: 'bookmark' })} + /> +
+
+
+ props.setActiveTab({ type: 'notes' })} + /> +
+
+ ); +} + +export default connect((state: any) => ({ + activeTab: state.getIn(['search', 'activeTab', 'type']), + isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee', +}), { setActiveTab })(OverviewMenu); diff --git a/frontend/app/components/shared/OverviewMenu/index.ts b/frontend/app/components/shared/OverviewMenu/index.ts new file mode 100644 index 000000000..91599b4c8 --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/index.ts @@ -0,0 +1 @@ +export { default } from './OverviewMenu'; \ No newline at end of file diff --git a/frontend/app/components/shared/ReloadButton/ReloadButton.tsx b/frontend/app/components/shared/ReloadButton/ReloadButton.tsx index 1eba6c115..774d427fe 100644 --- a/frontend/app/components/shared/ReloadButton/ReloadButton.tsx +++ b/frontend/app/components/shared/ReloadButton/ReloadButton.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { CircularLoader, Icon, Tooltip } from 'UI'; +import { CircularLoader, Icon, Tooltip, Button } from 'UI'; import cn from 'classnames'; interface Props { @@ -13,10 +13,8 @@ export default function ReloadButton(props: Props) { const { loading, onClick, iconSize = '20', iconName = 'arrow-repeat', className = '' } = props; return ( -
- {/* @ts-ignore */} - {loading ? : } -
+
); } diff --git a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx index e3bc98b62..f5144a9a6 100644 --- a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx +++ b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx @@ -4,21 +4,14 @@ import SessionHeader from './components/SessionHeader'; import NotesList from './components/Notes/NoteList'; import { connect } from 'react-redux'; import LatestSessionsMessage from './components/LatestSessionsMessage'; -import { clearCurrentSession } from "Duck/sessions"; function SessionListContainer({ activeTab, members, - clearCurrentSession, }: { activeTab: string; - fetchMembers: () => void; members: object[]; - clearCurrentSession: () => void; }) { - React.useEffect(() => { - clearCurrentSession() - }, []) return (
@@ -36,5 +29,4 @@ export default connect( // @ts-ignore members: state.getIn(['members', 'list']), }), - { clearCurrentSession } )(SessionListContainer); diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteTags.tsx b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteTags.tsx index 2831824b3..b7a4f6101 100644 --- a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteTags.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteTags.tsx @@ -18,7 +18,7 @@ function NoteTags() { const { notesStore } = useStore(); return ( -
+
notesStore.toggleTag()} @@ -35,7 +35,7 @@ function NoteTags() { />
))} -
+