Merge pull request #990 from openreplay/dev

Mehdi Osman authored on 2023-02-27 12:18:08 -05:00, committed by GitHub
commit 823d64c220
1591 changed files with 120663 additions and 52332 deletions

154
.github/workflows/alerts-ee.yaml vendored Normal file

@@ -0,0 +1,154 @@
# This action will push the alerts changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- "ee/api/**"
- "api/**"
- "!api/.gitignore"
- "!api/routers"
- "!api/app.py"
- "!api/*-dev.sh"
- "!api/requirements.txt"
- "!api/requirements-crons.txt"
- "!ee/api/.gitignore"
- "!ee/api/routers"
- "!ee/api/app.py"
- "!ee/api/*-dev.sh"
- "!ee/api/requirements.txt"
- "!ee/api/requirements-crons.txt"
name: Build and Deploy Alerts EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_alerts.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("alerts")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
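# trivy --exit-code 1 fails the scan when fixable HIGH/CRITICAL issues are
# found, which fails this step; the skip_security_checks input bypasses
# this whole block.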
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("alerts")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
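# The pipeline above lists every container image running in the app
# namespace, keeps the ones under the /foss/ registry path, and strips the
# registry prefix so only name:tag pairs remain.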
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee suffix, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
# Update changed image tag
sed -i "/alerts/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,alerts,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,alerts,quickwit} openreplay/charts/
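# Render only the charts restored above with helm template and apply the
# manifests directly; hooks and migrations are skipped, so just the alerts
# deployment is updated.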
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing the -ee flag, because helm will add it.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging

143
.github/workflows/alerts.yaml vendored Normal file

@@ -0,0 +1,143 @@
# This action will push the alerts changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- "api/**"
- "!api/.gitignore"
- "!api/routers"
- "!api/app.py"
- "!api/*-dev.sh"
- "!api/requirements.txt"
- "!api/requirements-crons.txt"
name: Build and Deploy Alerts
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing Alerts image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_alerts.sh
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("alerts")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("alerts")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
tag: ${image_array[1]}
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
# Update changed image tag
sed -i "/alerts/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,alerts,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,alerts,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: foss
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging


@@ -8,10 +8,20 @@ on:
default: 'false'
push:
branches:
- dev
- api-v1.10.0
paths:
- ee/api/**
- api/**
- "ee/api/**"
- "api/**"
- "!api/.gitignore"
- "!api/app_alerts.py"
- "!api/*-dev.sh"
- "!api/requirements-*.txt"
- "!ee/api/.gitignore"
- "!ee/api/app_alerts.py"
- "!ee/api/app_crons.py"
- "!ee/api/*-dev.sh"
- "!ee/api/requirements-*.txt"
name: Build and Deploy Chalice EE
@@ -44,7 +54,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing api image
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
@@ -56,7 +66,7 @@ jobs:
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
@@ -67,7 +77,10 @@ jobs:
} && {
echo "Skipping Security Checks"
}
PUSH_IMAGE=1 bash -x ./build.sh ee
images=("chalice")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
@@ -107,7 +120,10 @@ jobs:
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing the -ee flag, because helm will add it.


@@ -8,9 +8,13 @@ on:
default: 'false'
push:
branches:
- dev
- api-v1.10.0
paths:
- api/**
- "api/**"
- "!api/.gitignore"
- "!api/app_alerts.py"
- "!api/*-dev.sh"
- "!api/requirements-*.txt"
name: Build and Deploy Chalice
@@ -43,7 +47,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing api image
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
@@ -55,7 +59,7 @@ jobs:
PUSH_IMAGE=0 bash -x ./build.sh
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
@@ -66,7 +70,10 @@ jobs:
} && {
echo "Skipping Security Checks"
}
PUSH_IMAGE=1 bash -x ./build.sh
images=("chalice")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
@@ -131,4 +138,4 @@ jobs:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#


@@ -117,4 +117,4 @@ jobs:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
#


@@ -116,4 +116,4 @@ jobs:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
#

153
.github/workflows/crons-ee.yaml vendored Normal file

@@ -0,0 +1,153 @@
# This action will push the crons changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- "ee/api/**"
- "api/**"
- "!api/.gitignore"
- "!api/app.py"
- "!api/app_alerts.py"
- "!api/*-dev.sh"
- "!api/requirements.txt"
- "!api/requirements-alerts.txt"
- "!ee/api/.gitignore"
- "!ee/api/app.py"
- "!ee/api/app_alerts.py"
- "!ee/api/*-dev.sh"
- "!ee/api/requirements.txt"
- "!ee/api/requirements-crons.txt"
name: Build and Deploy Crons EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_crons.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("crons")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("crons")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee suffix, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
# Update changed image tag
sed -i "/crons/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,utilities,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,utilities,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing the -ee flag, because helm will add it.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
#


@@ -20,9 +20,11 @@ jobs:
restore-keys: |
${{ runner.OS }}-build-
${{ runner.OS }}-
- name: Docker login
run: |
docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
@@ -45,15 +47,18 @@ jobs:
docker tag $DOCKER_REPO/frontend:${IMAGE_TAG} $DOCKER_REPO/frontend:${IMAGE_TAG}-ee
docker push $DOCKER_REPO/frontend:${IMAGE_TAG}
docker push $DOCKER_REPO/frontend:${IMAGE_TAG}-ee
- name: Deploy to kubernetes foss
run: |
cd scripts/helmcharts/
set -x
cat <<EOF>>/tmp/image_override.yaml
frontend:
image:
tag: ${IMAGE_TAG}
EOF
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.DEV_PG_PASSWORD }}\"/g" vars.yaml
@@ -61,8 +66,10 @@ jobs:
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.DEV_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.DEV_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.DEV_DOMAIN_NAME }}\"/g" vars.yaml
# Update changed image tag
sed -i "/frontend/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,frontend,quickwit} /tmp


@@ -1,4 +1,4 @@
name: Frontend FOSS Deployment
name: Frontend Foss Deployment
on:
workflow_dispatch:
push:

69
.github/workflows/peers-ee.yaml vendored Normal file

@@ -0,0 +1,69 @@
# This action will push the peers changes to aws
on:
workflow_dispatch:
push:
branches:
- dev
paths:
- "ee/peers/**"
- "peers/**"
- "!peers/.gitignore"
- "!peers/*-dev.sh"
name: Build and Deploy Peers
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
cd peers
PUSH_IMAGE=1 bash build.sh ee
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml
sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml
sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
bash kube-install.sh --app peers
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#

68
.github/workflows/peers.yaml vendored Normal file

@@ -0,0 +1,68 @@
# This action will push the peers changes to aws
on:
workflow_dispatch:
push:
branches:
- dev
paths:
- "peers/**"
- "!peers/.gitignore"
- "!peers/*-dev.sh"
name: Build and Deploy Peers
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
cd peers
PUSH_IMAGE=1 bash build.sh
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml
sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
bash kube-install.sh --app peers
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#


@@ -5,9 +5,11 @@ on:
branches:
- dev
paths:
- sourcemap-reader/**
- "sourcemap-reader/**"
- "!sourcemap-reader/.gitignore"
- "!sourcemap-reader/*-dev.sh"
name: Build and Deploy Chalice
name: Build and Deploy sourcemap-reader
jobs:
deploy:
@@ -38,7 +40,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing sourcemaps-reader image
- name: Building and Pushing sourcemaps-reader image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}


@@ -7,34 +7,131 @@ on:
branches: [ "main" ]
paths:
- frontend/**
- tracker/**
pull_request:
branches: [ "dev", "main" ]
paths:
- frontend/**
- tracker/**
env:
API: ${{ secrets.E2E_API_ORIGIN }}
ASSETS: ${{ secrets.E2E_ASSETS_ORIGIN }}
APIEDP: ${{ secrets.E2E_EDP_ORIGIN }}
CY_ACC: ${{ secrets.CYPRESS_ACCOUNT }}
CY_PASS: ${{ secrets.CYPRESS_PASSWORD }}
FOSS_PROJECT_KEY: ${{ secrets.FOSS_PROJECT_KEY }}
FOSS_INGEST: ${{ secrets.FOSS_INGEST }}
jobs:
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./frontend
build-and-test:
runs-on: macos-latest
name: Build and test Tracker plus Replayer
strategy:
matrix:
node-version: [16.x]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
node-version: [ 16.x ]
steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Setup packages
run: npm i --legacy-peer-deps --ignore-scripts
- name: Run unit tests
run: npm test --if-present
- name: Run Frontend
run: npm start
- name: Wait for frontend
run: npx wait-on http://0.0.0.0:3333
- name: Run visual tests
run: npm run cy:test --if-present
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Cache tracker modules
uses: actions/cache@v1
with:
path: tracker/tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.OS }}-test_tracker_build-
${{ runner.OS }}-
- name: Setup Testing packages
run: |
cd tracker/tracker
npm i -g yarn
yarn
- name: Jest tests
run: |
cd tracker/tracker
yarn test
- name: Build tracker inst
run: |
cd tracker/tracker
yarn build
- name: Setup Testing UI Env
run: |
cd tracker/tracker-testing-playground
echo "REACT_APP_KEY=$FOSS_PROJECT_KEY" >> .env
echo "REACT_APP_INGEST=$FOSS_INGEST" >> .env
- name: Setup Testing packages
run: |
cd tracker/tracker-testing-playground
yarn
- name: Wait for Testing Frontend
run: |
cd tracker/tracker-testing-playground
yarn start &> ui.log &
npx wait-on http://localhost:3000
cd ../../frontend
- name: Cache node modules
uses: actions/cache@v1
with:
path: frontend/node_modules
key: ${{ runner.OS }}-build-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.OS }}-build-
${{ runner.OS }}-
- name: Setup env
run: |
cd frontend
echo "NODE_ENV=development" >> .env
echo "SOURCEMAP=true" >> .env
echo "ORIGIN=$API" >> .env
echo "ASSETS_HOST=$ASSETS" >> .env
echo "API_EDP=$APIEDP" >> .env
echo "SENTRY_ENABLED = false" >> .env
echo "SENTRY_URL = ''" >> .env
echo "CAPTCHA_ENABLED = false" >> .env
echo "CAPTCHA_SITE_KEY = 'asdad'" >> .env
echo "MINIO_ENDPOINT = ''" >> .env
echo "MINIO_PORT = ''" >> .env
echo "MINIO_USE_SSL = ''" >> .env
echo "MINIO_ACCESS_KEY = ''" >> .env
echo "MINIO_SECRET_KEY = ''" >> .env
echo "VERSION = '1.9.0'" >> .env
echo "TRACKER_VERSION = '4.0.0'" >> .env
echo "COMMIT_HASH = 'dev'" >> .env
echo "{ \"account\": \"$CY_ACC\", \"password\": \"$CY_PASS\" }" >> cypress.env.json
- name: Setup packages
run: |
cd frontend
yarn
- name: Run unit tests
run: |
cd frontend
yarn test
- name: Run Frontend
run: |
cd frontend
yarn start &> frontend.log &
- name: Wait for frontend
run: |
cd frontend
npx wait-on http://0.0.0.0:3333
- name: (Chrome) Run visual tests
run: |
cd frontend
yarn cy:test
# Firefox has a different viewport somehow
# - name: (Firefox) Run visual tests
# run: yarn cy:test-firefox
# - name: (Edge) Run visual tests
# run: yarn cy:test-edge
- name: Upload Debug
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: 'Snapshots'
path: |
frontend/cypress/videos
frontend/cypress/snapshots/replayer.cy.ts
frontend/cypress/screenshots
frontend/cypress/snapshots/generalStability.cy.ts


@@ -71,12 +71,10 @@ jobs:
case ${build_param} in
false)
{
git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3
git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do
grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3
done
} | uniq > /tmp/images_to_build.txt
} | awk '!seen[$0]++' > /tmp/images_to_build.txt
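# Rebuild a service when its own cmd/services directory changed, or when a
# shared pkg/internal package it references changed; awk '!seen[$0]++'
# de-duplicates the combined list without requiring sorted input (plain
# uniq only collapses adjacent duplicates).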
;;
all)
ls backend/cmd > /tmp/images_to_build.txt
@@ -95,6 +93,7 @@ jobs:
# Pushing image to registry
#
cd backend
cat /tmp/images_to_build.txt
for image in $(cat /tmp/images_to_build.txt);
do
echo "Bulding $image"
@@ -109,7 +108,7 @@ jobs:
} && {
echo "Skipping Security Checks"
}
PUSH_IMAGE=1 bash -x ./build.sh ee $image
docker push $DOCKER_REPO/$image:$IMAGE_TAG
echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG"
done
@@ -156,22 +155,19 @@ jobs:
mv /tmp/helmcharts/* openreplay/charts/
ls openreplay/charts
cat /tmp/image_override.yaml
# Deploy command
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f -
#- name: Alert slack
# if: ${{ failure() }}
# uses: rtCamp/action-slack-notify@v2
# env:
# SLACK_CHANNEL: ee
# SLACK_TITLE: "Failed ${{ github.workflow }}"
# SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
# SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
# SLACK_USERNAME: "OR Bot"
# SLACK_MESSAGE: 'Build failed :bomb:'
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}


@@ -71,12 +71,10 @@ jobs:
case ${build_param} in
false)
{
git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3
git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do
grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3
done
} | uniq > /tmp/images_to_build.txt
} | awk '!seen[$0]++' > /tmp/images_to_build.txt
;;
all)
ls backend/cmd > /tmp/images_to_build.txt
@@ -95,6 +93,7 @@ jobs:
# Pushing image to registry
#
cd backend
cat /tmp/images_to_build.txt
for image in $(cat /tmp/images_to_build.txt);
do
echo "Bulding $image"
@@ -109,7 +108,7 @@ jobs:
} && {
echo "Skipping Security Checks"
}
PUSH_IMAGE=1 bash -x ./build.sh skip $image
docker push $DOCKER_REPO/$image:$IMAGE_TAG
echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG"
done
@@ -154,8 +153,6 @@ jobs:
mv /tmp/helmcharts/* openreplay/charts/
ls openreplay/charts
cat /tmp/image_override.yaml
# Deploy command
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f -

4
.gitignore vendored

@@ -3,4 +3,6 @@ public
node_modules
*DS_Store
*.env
.idea
*.log
**/*.envrc
.idea


@@ -34,7 +34,7 @@
OpenReplay is a session replay suite you can host yourself that lets you see what users do on your web app, helping you troubleshoot issues faster. It's the only open-source alternative to products such as FullStory and LogRocket.
- **Session replay.** OpenReplay replays what users do, and more: it also shows you what went on under the hood and how your website or app behaves, by capturing network activity, console logs, JS errors, store actions/state, page speed metrics, CPU/memory usage and much more.
- **Low footprint**. With a ~18KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance.
- **Low footprint**. With a ~19KB (.gz) tracker that asynchronously sends minimal data for a very limited impact on performance.
- **Self-hosted**. No more security compliance checks, 3rd-parties processing user data. Everything OpenReplay captures stays in your cloud for a complete control over your data.
- **Privacy controls**. Fine-grained security features for sanitizing user data.
- **Easy deploy**. With support of major public cloud providers (AWS, GCP, Azure, DigitalOcean).

4
api/.gitignore vendored

@@ -84,6 +84,7 @@ wheels/
*.egg
MANIFEST
Pipfile
Pipfile.lock
# PyInstaller
# Usually these files are written by a python script from a template
@@ -175,5 +176,4 @@ SUBNETS.json
./chalicelib/.configs
README/*
.local
build_crons.sh
.local

3
api/.trivyignore Normal file

@@ -0,0 +1,3 @@
# Accept the risk until the recent python setuptools fix
# is available in distro packages.
CVE-2022-40897 exp:2023-02-01


@@ -1,6 +1,9 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ARG GIT_SHA
LABEL GIT_SHA=$GIT_SHA
RUN apk add --no-cache build-base tini
ARG envarg
# Add Tini
@@ -9,7 +12,8 @@ ENV SOURCE_MAP_VERSION=0.7.4 \
APP_NAME=chalice \
LISTEN_PORT=8000 \
PRIVATE_ENDPOINTS=false \
ENTERPRISE_BUILD=${envarg}
ENTERPRISE_BUILD=${envarg} \
GIT_SHA=$GIT_SHA
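# GIT_SHA arrives as a build argument and is recorded both as an image label
# and as a runtime environment variable, making the build commit visible at runtime.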
WORKDIR /work
COPY requirements.txt ./requirements.txt


@@ -1,6 +1,9 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ARG GIT_SHA
LABEL GIT_SHA=$GIT_SHA
RUN apk add --no-cache build-base tini
ARG envarg
ENV APP_NAME=alerts \
@@ -8,6 +11,7 @@ ENV APP_NAME=alerts \
PG_MAXCONN=10 \
LISTEN_PORT=8000 \
PRIVATE_ENDPOINTS=true \
GIT_SHA=$GIT_SHA \
ENTERPRISE_BUILD=${envarg}
WORKDIR /work


@@ -1,29 +0,0 @@
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work
COPY . .
COPY ../utilities ./utilities
RUN rm entrypoint.sh && rm .chalice/config.json
RUN mv entrypoint.bundle.sh entrypoint.sh && mv .chalice/config.bundle.json .chalice/config.json
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/* && \
cd utilities && \
npm install
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
RUN adduser -u 1001 openreplay -D
USER 1001
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh


@@ -12,7 +12,7 @@ from chalicelib.utils import pg_client
from routers import core, core_dynamic
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.subs import dashboard, insights, metrics, v1_api
from routers.subs import insights, metrics, v1_api
app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
app.add_middleware(GZipMiddleware, minimum_size=1000)
@@ -48,7 +48,6 @@ app.include_router(core.app_apikey)
app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(dashboard.app)
app.include_router(metrics.app)
app.include_router(insights.app)
app.include_router(v1_api.app_apikey)


@@ -53,3 +53,10 @@ async def stop_server():
await shutdown()
import os, signal
os.kill(1, signal.SIGTERM)
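# Development convenience: when LOCAL_DEV is set, expose a private endpoint
# that runs the alerts processor on demand instead of waiting for the cron.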
if config("LOCAL_DEV", default=False, cast=bool):
@app.get('/private/trigger', tags=["private"])
async def trigger_main_cron():
logging.info("Triggering main cron")
alerts_processor.process()


@@ -16,7 +16,8 @@ exit_err() {
}
environment=$1
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-$git_sha} # default to the short commit SHA when IMAGE_TAG is unset
envarg="default-foss"
check_prereq() {
which docker || {
@@ -41,25 +42,28 @@ function build_api(){
tag="ee-"
}
mv Dockerfile.dockerignore .dockerignore
docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/chalice:${git_sha1} .
docker build -f ./Dockerfile --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/chalice:${image_tag} .
cd ../api
rm -rf ../${destination}
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1}
docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest
docker push ${DOCKER_REPO:-'local'}/chalice:${image_tag}
docker tag ${DOCKER_REPO:-'local'}/chalice:${image_tag} ${DOCKER_REPO:-'local'}/chalice:${tag}latest
docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest
}
[[ $SIGN_IMAGE -eq 1 ]] && {
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/chalice:${image_tag}
}
echo "api docker build completed"
}
check_prereq
build_api $environment
echo build_complete
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1
[[ $environment == "ee" ]] && {
cp ../ee/api/build_crons.sh .
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1
exit_err $?
rm build_crons.sh
} || true
#IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_alerts.sh $1
#
#[[ $environment == "ee" ]] && {
# cp ../ee/api/build_crons.sh .
# IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_crons.sh $1
# exit_err $?
# rm build_crons.sh
#} || true


@@ -7,7 +7,8 @@
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-$git_sha} # default to the short commit SHA when IMAGE_TAG is unset
envarg="default-foss"
check_prereq() {
which docker || {
@@ -31,14 +32,17 @@ function build_alerts(){
tag="ee-"
}
mv Dockerfile_alerts.dockerignore .dockerignore
docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/alerts:${git_sha1} .
docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/alerts:${image_tag} .
cd ../api
rm -rf ../${destination}
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/alerts:${git_sha1}
docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest
docker push ${DOCKER_REPO:-'local'}/alerts:${image_tag}
docker tag ${DOCKER_REPO:-'local'}/alerts:${image_tag} ${DOCKER_REPO:-'local'}/alerts:${tag}latest
docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest
}
[[ $SIGN_IMAGE -eq 1 ]] && {
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/alerts:${image_tag}
}
echo "completed alerts build"
}


@@ -36,8 +36,13 @@ function build_crons(){
docker tag ${DOCKER_REPO:-'local'}/crons:${git_sha1} ${DOCKER_REPO:-'local'}/crons:${tag}latest
docker push ${DOCKER_REPO:-'local'}/crons:${tag}latest
}
[[ $SIGN_IMAGE -eq 1 ]] && {
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/crons:${git_sha1}
}
echo "completed crons build"
}
check_prereq
build_crons $1
[[ $1 == "ee" ]] && {
build_crons $1
}


@@ -0,0 +1 @@
from . import sessions as sessions_legacy


@@ -1,9 +1,14 @@
import json
import logging
import time
from datetime import datetime
from decouple import config
import schemas
from chalicelib.core import notifications, slack, webhook
from chalicelib.core import notifications, webhook
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -24,10 +29,15 @@ def get_all(project_id):
def get_all(project_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
SELECT *
FROM public.alerts
WHERE project_id =%(project_id)s AND deleted_at ISNULL
ORDER BY created_at;""",
SELECT alerts.*,
COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count',
query ->> 'left') AS series_name
FROM public.alerts
LEFT JOIN metric_series USING (series_id)
LEFT JOIN metrics USING (metric_id)
WHERE alerts.project_id =%(project_id)s
AND alerts.deleted_at ISNULL
ORDER BY alerts.created_at;""",
{"project_id": project_id})
cur.execute(query=query)
all = helper.list_to_camel_case(cur.fetchall())
@@ -95,7 +105,7 @@ def process_notifications(data):
for c in n["options"].pop("message"):
if c["type"] not in full:
full[c["type"]] = []
if c["type"] in ["slack", "email"]:
if c["type"] in ["slack", "msteams", "email"]:
full[c["type"]].append({
"notification": n,
"destination": c["value"]
@@ -107,13 +117,21 @@ def process_notifications(data):
for t in full.keys():
for i in range(0, len(full[t]), BATCH_SIZE):
notifications_list = full[t][i:i + BATCH_SIZE]
if notifications_list is None or len(notifications_list) == 0:
break
if t == "slack":
try:
slack.send_batch(notifications_list=notifications_list)
send_to_slack_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending slack notifications batch")
logging.error(str(e))
elif t == "msteams":
try:
send_to_msteams_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending msteams notifications batch")
logging.error(str(e))
elif t == "email":
try:
send_by_email_batch(notifications_list=notifications_list)
@@ -149,16 +167,60 @@ def send_by_email_batch(notifications_list):
time.sleep(1)
def send_to_slack_batch(notifications_list):
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \
+ f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
"title": n["notification"]["title"],
"title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()})
for batch in webhookId_map.keys():
Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])
def send_to_msteams_batch(notifications_list):
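# Group notifications by destination webhook and render each one as an
# MS Teams Adaptive Card ColumnSet; the link uses markdown [text](url) syntax.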
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
link = f"[{n['notification']['buttonText']}]({config('SITE_URL')}{n['notification']['buttonUrl']})"
webhookId_map[n.get("destination")]["batch"].append({"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": n["notification"]["title"],
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": n["notification"]["description"],
"wrap": True},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]})
for batch in webhookId_map.keys():
MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])
def delete(project_id, alert_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.alerts
SET
deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE
alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
cur.mogrify(""" UPDATE public.alerts
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
{"alert_id": alert_id, "project_id": project_id})
)
return {"data": {"state": "success"}}


@@ -5,7 +5,7 @@ def get_all_alerts():
with pg_client.PostgresClient(long_query=True) as cur:
query = """SELECT -1 AS tenant_id,
alert_id,
project_id,
projects.project_id,
detection_method,
query,
options,
@@ -13,10 +13,13 @@ def get_all_alerts():
alerts.name,
alerts.series_id,
filter,
change
change,
COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count',
query ->> 'left') AS series_name
FROM public.alerts
LEFT JOIN metric_series USING (series_id)
INNER JOIN projects USING (project_id)
LEFT JOIN metric_series USING (series_id)
LEFT JOIN metrics USING (metric_id)
WHERE alerts.deleted_at ISNULL
AND alerts.active
AND projects.active


@@ -49,10 +49,12 @@ LeftToDb = {
schemas.AlertColumn.errors__4xx_5xx__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.errors__4xx__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.errors__5xx__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
@@ -95,7 +97,7 @@ def can_check(a) -> bool:
a["options"].get("lastNotification") is None or
a["options"]["lastNotification"] <= 0 or
((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
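# i.e. notify only if there was no previous notification (or the renotify
# interval has elapsed) and "now" falls within one minute of the alert's
# repetition boundary, so each pass fires a given alert at most once.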
def Build(a):
@@ -119,7 +121,7 @@ def Build(a):
subQ = f"""SELECT {colDef["formula"]} AS value
FROM {colDef["table"]}
WHERE project_id = %(project_id)s
{"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
{"AND " + colDef["condition"] if colDef.get("condition") else ""}"""
j_s = colDef.get("joinSessions", True)
main_table = colDef["table"]
is_ss = main_table == "public.sessions"
@@ -142,8 +144,7 @@ def Build(a):
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
AND timestamp<=%(now)s
sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
@@ -206,7 +207,7 @@ def process():
cur = cur.recreate(rollback=True)
if len(notifications) > 0:
cur.execute(
cur.mogrify(f"""UPDATE public.Alerts
cur.mogrify(f"""UPDATE public.alerts
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
if len(notifications) > 0:
@@ -228,7 +229,7 @@ def generate_notification(alert, result):
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
"title": alert["name"],
"description": f"has been triggered, {alert['query']['left']} = {left} ({alert['query']['operator']} {right}).",
"description": f"has been triggered, {alert['seriesName']} = {left} ({alert['query']['operator']} {right}).",
"buttonText": "Check metrics for more details",
"buttonUrl": f"/{alert['projectId']}/metrics",
"imageUrl": None,


@@ -4,7 +4,8 @@ from os.path import exists as path_exists, getsize
import jwt
import requests
from decouple import config
from starlette.exceptions import HTTPException
from starlette import status
from fastapi import HTTPException
import schemas
from chalicelib.core import projects
@@ -181,6 +182,8 @@ def autocomplete(project_id, q: str, key: str = None):
except:
print("couldn't get response")
return {"errors": ["Something went wrong wile calling assist"]}
for r in results:
r["type"] = __change_keys(r["type"])
return {"data": results}
@@ -192,10 +195,11 @@ def get_ice_servers():
def __get_efs_path():
efs_path = config("FS_DIR")
if not path_exists(efs_path):
raise HTTPException(400, f"EFS not found in path: {efs_path}")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"EFS not found in path: {efs_path}")
if not access(efs_path, R_OK):
raise HTTPException(400, f"EFS found under: {efs_path}; but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"EFS found under: {efs_path}; but it is not readable, please check permissions")
return efs_path
@@ -209,11 +213,12 @@ def get_raw_mob_by_id(project_id, session_id):
path_to_file = efs_path + "/" + __get_mob_path(project_id=project_id, session_id=session_id)
if path_exists(path_to_file):
if not access(path_to_file, R_OK):
raise HTTPException(400, f"Replay file found under: {efs_path};" +
f" but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Replay file found under: {efs_path};" +
" but it is not readable, please check permissions")
# getsize return size in bytes, UNPROCESSED_MAX_SIZE is in Kb
if (getsize(path_to_file) / 1000) >= config("UNPROCESSED_MAX_SIZE", cast=int, default=200 * 1000):
raise HTTPException(413, "Replay file too large")
raise HTTPException(status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, detail="Replay file too large")
return path_to_file
return None
@@ -229,8 +234,9 @@ def get_raw_devtools_by_id(project_id, session_id):
path_to_file = efs_path + "/" + __get_devtools_path(project_id=project_id, session_id=session_id)
if path_exists(path_to_file):
if not access(path_to_file, R_OK):
raise HTTPException(400, f"Devtools file found under: {efs_path};"
f" but it is not readable, please check permissions")
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Devtools file found under: {efs_path};"
" but it is not readable, please check permissions")
return path_to_file
@@ -262,3 +268,27 @@ def session_exists(project_id, session_id):
except:
print("couldn't get response")
return False
def __change_keys(key):
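# Map the uppercase field names returned by the assist service back to the
# filter keys the API schema expects; unknown keys fall through unchanged.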
return {
"PAGETITLE": schemas.LiveFilterType.page_title.value,
"ACTIVE": "active",
"LIVE": "live",
"SESSIONID": schemas.LiveFilterType.session_id.value,
"METADATA": schemas.LiveFilterType.metadata.value,
"USERID": schemas.LiveFilterType.user_id.value,
"USERUUID": schemas.LiveFilterType.user_UUID.value,
"PROJECTKEY": "projectKey",
"REVID": schemas.LiveFilterType.rev_id.value,
"TIMESTAMP": "timestamp",
"TRACKERVERSION": schemas.LiveFilterType.tracker_version.value,
"ISSNIPPET": "isSnippet",
"USEROS": schemas.LiveFilterType.user_os.value,
"USERBROWSER": schemas.LiveFilterType.user_browser.value,
"USERBROWSERVERSION": schemas.LiveFilterType.user_browser_version.value,
"USERDEVICE": schemas.LiveFilterType.user_device.value,
"USERDEVICETYPE": schemas.LiveFilterType.user_device_type.value,
"USERCOUNTRY": schemas.LiveFilterType.user_country.value,
"PROJECTID": "projectId"
}.get(key.upper(), key)


@@ -15,7 +15,7 @@ def jwt_authorizer(token):
token[1],
config("jwt_secret"),
algorithms=config("jwt_algorithm"),
audience=[ f"front:{helper.get_stage_name()}"]
audience=[f"front:{helper.get_stage_name()}"]
)
except jwt.ExpiredSignatureError:
print("! JWT Expired signature")
@@ -37,12 +37,16 @@ def jwt_context(context):
}
def get_jwt_exp(iat):
return iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000
def generate_jwt(id, tenant_id, iat, aud):
token = jwt.encode(
payload={
"userId": id,
"tenantId": tenant_id,
"exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
"exp": get_jwt_exp(iat),
"iss": config("JWT_ISSUER"),
"iat": iat // 1000,
"aud": aud


@@ -25,24 +25,24 @@ def __get_autocomplete_table(value, project_id):
if e == schemas.FilterType.user_country:
c_list = countries.get_country_code_autocomplete(value)
if len(c_list) > 0:
sub_queries.append(f"""(SELECT DISTINCT ON(value) type, value
sub_queries.append(f"""(SELECT DISTINCT ON(value) '{e.value}' AS _type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND type= '{e.value.upper()}'
AND value IN %(c_list)s)""")
continue
sub_queries.append(f"""(SELECT type, value
sub_queries.append(f"""(SELECT '{e.value}' AS _type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND type= '{e.value.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
sub_queries.append(f"""(SELECT '{e.value}' AS _type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND type= '{e.value.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)""")
@@ -62,8 +62,11 @@ def __get_autocomplete_table(value, project_id):
print(value)
print("--------------------")
raise err
results = helper.list_to_camel_case(cur.fetchall())
return results
results = cur.fetchall()
for r in results:
r["type"] = r.pop("_type")
results = helper.list_to_camel_case(results)
return results
def __generic_query(typename, value_length=None):
@@ -72,7 +75,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value IN %(value)s
ORDER BY value"""
@@ -81,7 +84,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
@@ -90,7 +93,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
@@ -98,7 +101,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
@@ -135,13 +138,13 @@ def __generic_autocomplete_metas(typename):
return f
def __pg_errors_query(source=None, value_length=None):
def __errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
@@ -152,8 +155,8 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
@@ -164,8 +167,8 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
@@ -176,8 +179,8 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
@@ -187,8 +190,8 @@ def __pg_errors_query(source=None, value_length=None):
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
@@ -199,8 +202,8 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.event_type.ERROR.ui_type}' AS type
FROM {events.event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
@ -209,11 +212,11 @@ def __pg_errors_query(source=None, value_length=None):
LIMIT 5));"""
def __search_pg_errors(project_id, value, key=None, source=None):
def __search_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source,
value_length=len(value)),
cur.mogrify(__errors_query(source,
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
@ -221,12 +224,12 @@ def __search_pg_errors(project_id, value, key=None, source=None):
return results
def __search_pg_errors_ios(project_id, value, key=None, source=None):
def __search_errors_ios(project_id, value, key=None, source=None):
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -235,8 +238,8 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -245,8 +248,8 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -255,8 +258,8 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -265,8 +268,8 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -275,8 +278,8 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.event_type.ERROR_IOS.ui_type}' AS type
FROM {events.event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -289,7 +292,7 @@ def __search_pg_errors_ios(project_id, value, key=None, source=None):
return results
def __search_pg_metadata(project_id, value, key=None, source=None):
def __search_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
@ -323,4 +326,4 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
return results
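For reference, a minimal sketch of how the renamed __errors_query is parameterized, mirroring the cur.mogrify call in __search_errors above. The search value is hypothetical; helper.string_to_sql_like is assumed to escape raw input into an ILIKE pattern, with the "^"-prefixed variant anchoring the match at the start of the value.

from chalicelib.utils import helper, pg_client

value = "TypeError"  # hypothetical user input
params = {"project_id": 1,
          "value": helper.string_to_sql_like(value),         # broad ILIKE pattern
          "svalue": helper.string_to_sql_like("^" + value),  # start-anchored pattern
          "source": None}
with pg_client.PostgresClient() as cur:
    cur.execute(cur.mogrify(__errors_query(source=None, value_length=len(value)),
                            params))
    results = helper.list_to_camel_case(cur.fetchall())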


@ -0,0 +1,77 @@
import schemas
from chalicelib.core import sessions_mobs, sessions_legacy as sessions_search, events
from chalicelib.utils import pg_client, helper
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_os,
s.user_browser,
s.user_device,
s.user_device_type,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
s.user_anonymous_id,
s.platform,
s.issue_score,
to_jsonb(s.issue_types) AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """
def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
no_platform = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
no_platform = False
break
if no_platform:
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is))
full_args, query_part = sessions_search.search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None,
project_id=project_id, user_id=user_id)
with pg_client.PostgresClient() as cur:
data.order = schemas.SortOrderType.desc
data.sort = 'duration'
# meta_keys = metadata.get(project_id=project_id)
meta_keys = []
main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY {data.sort} {data.order.value}
LIMIT 1;""", full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
print(main_query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(data.json())
print("--------------------")
raise err
session = cur.fetchone()
if session:
if include_mobs:
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
event_type=schemas.EventType.location)
return helper.dict_to_camel_case(session)
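A hedged usage sketch of the click-map lookup above: when no platform filter is present a desktop one is appended, sorting is forced to duration descending with LIMIT 1, and the single best-matching session comes back camel-cased. The ids and timestamps below are hypothetical, and FlatClickMapSessionsSearch is assumed to accept the standard session-search payload fields.

payload = schemas.FlatClickMapSessionsSearch(startTimestamp=1677000000000,
                                             endTimestamp=1677086400000)
session = search_short_session(data=payload, project_id=1, user_id=7,
                               include_mobs=False)  # skip domURL/mobsUrl lookups
if session is not None:
    print(session["sessionId"], session["duration"])  # keys are camel-cased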


@ -0,0 +1,45 @@
from abc import ABC, abstractmethod
import schemas
class BaseCollaboration(ABC):
@classmethod
@abstractmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
pass
@classmethod
@abstractmethod
def say_hello(cls, url):
pass
@classmethod
@abstractmethod
def send_raw(cls, tenant_id, webhook_id, body):
pass
@classmethod
@abstractmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
pass
@classmethod
@abstractmethod
def __share(cls, tenant_id, integration_id, attachments):
pass
@classmethod
@abstractmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
pass
@classmethod
@abstractmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
pass
@classmethod
@abstractmethod
def get_integration(cls, tenant_id, integration_id=None):
pass
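The base class above pins down the collaboration contract that the Slack and MSTeams classes below satisfy as pure classmethods. One subtlety: because __share is name-mangled inside the class body, a subclass's __share defines _Subclass__share rather than overriding _BaseCollaboration__share; that only surfaces if the class is ever instantiated, which these implementations never are. A hedged no-op subclass sketch (all names hypothetical, assuming the imports above):

class NoopCollaboration(BaseCollaboration):
    @classmethod
    def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
        return None

    @classmethod
    def say_hello(cls, url):
        return True

    @classmethod
    def send_raw(cls, tenant_id, webhook_id, body):
        return {"data": ""}

    @classmethod
    def send_batch(cls, tenant_id, webhook_id, attachments):
        return None

    @classmethod
    def __share(cls, tenant_id, integration_id, attachments):
        return None  # mangled to _NoopCollaboration__share, see note above

    @classmethod
    def share_session(cls, tenant_id, project_id, session_id, user, comment,
                      integration_id=None):
        return {"data": ""}

    @classmethod
    def share_error(cls, tenant_id, project_id, error_id, user, comment,
                    integration_id=None):
        return {"data": ""}

    @classmethod
    def get_integration(cls, tenant_id, integration_id=None):
        return None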


@ -0,0 +1,195 @@
import json
import requests
from decouple import config
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
class MSTeams(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.msteams):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url,
webhook_type=schemas.WebhookType.msteams,
name=data.name)
return None
# https://messagecardplayground.azurewebsites.net
# https://adaptivecards.io/designer/
@classmethod
def say_hello(cls, url):
r = requests.post(
url=url,
json={
"@type": "MessageCard",
"@context": "https://schema.org/extensions",
"summary": "Hello message",
"title": "Welcome to OpenReplay"
})
if r.status_code != 200:
print("MSTeams integration failed")
print(r.text)
return False
return True
@classmethod
def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["msteams integration not found"]}
try:
r = requests.post(
url=integration["endpoint"],
json=body,
timeout=5)
if r.status_code != 200:
print(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
print(r.text)
return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
return None
except Exception as e:
print(f"!! Issue sending msteams raw webhookId:{webhook_id}")
print(str(e))
return None
return {"data": r.text}
@classmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["msteams integration not found"]}
print(f"====> sending msteams batch notification: {len(attachments)}")
for i in range(0, len(attachments), 100):
print(json.dumps({"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.2",
"body": attachments[i:i + 100]}}
]}))
r = requests.post(
url=integration["endpoint"],
json={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.2",
"body": attachments[i:i + 100]}}
]})
if r.status_code != 200:
print("!!!! something went wrong")
print(r)
print(r.text)
@classmethod
def __share(cls, tenant_id, integration_id, attachement):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["Microsoft Teams integration not found"]}
r = requests.post(
url=integration["endpoint"],
json={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [attachement]}}
]
})
return r.text
@classmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
title = f"[{user}](mailto:{user}) has shared the below session!"
link = f"{config('SITE_URL')}/{project_id}/session/{session_id}"
link = f"[{link}]({link})"
args = {"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]}
if comment and len(comment) > 0:
args["columns"][0]["items"].append({
"type": "TextBlock",
"spacing": "small",
"text": comment
})
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
title = f"[{user}](mailto:{user}) has shared the below error!"
link = f"{config('SITE_URL')}/{project_id}/errors/{error_id}"
link = f"[{link}]({link})"
args = {"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]}
if comment and len(comment) > 0:
args["columns"][0]["items"].append({
"type": "TextBlock",
"spacing": "small",
"text": comment
})
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.msteams)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]
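A hedged sketch of the batching rule in send_batch above: Adaptive Card body elements are posted in slices of 100 per webhook message, so a 250-element list yields three calls. The attachment contents here are hypothetical; the envelope mirrors the code.

attachments = [{"type": "TextBlock", "text": f"alert {i}"} for i in range(250)]
for i in range(0, len(attachments), 100):          # slices of 100, 100, 50
    body = {"type": "message",
            "attachments": [
                {"contentType": "application/vnd.microsoft.card.adaptive",
                 "contentUrl": None,
                 "content": {
                     "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                     "type": "AdaptiveCard",
                     "version": "1.2",
                     "body": attachments[i:i + 100]}}]}
    # requests.post(url=integration["endpoint"], json=body)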


@ -1,19 +1,26 @@
import requests
from decouple import config
from datetime import datetime
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
class Slack:
class Slack(BaseCollaboration):
@classmethod
def add_channel(cls, tenant_id, **args):
url = args["url"]
name = args["name"]
if cls.say_hello(url):
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.slack):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=url,
webhook_type="slack",
name=name)
endpoint=data.url,
webhook_type=schemas.WebhookType.slack,
name=data.name)
return None
@classmethod
@ -34,40 +41,9 @@ class Slack:
return False
return True
@classmethod
def send_text_attachments(cls, tenant_id, webhook_id, text, **args):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
try:
r = requests.post(
url=integration["endpoint"],
json={
"attachments": [
{
"text": text,
"ts": datetime.now().timestamp(),
**args
}
]
},
timeout=5)
if r.status_code != 200:
print(f"!! issue sending slack text attachments; webhookId:{webhook_id} code:{r.status_code}")
print(r.text)
return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending slack text attachments webhookId:{webhook_id}")
return None
except Exception as e:
print(f"!! Issue sending slack text attachments webhookId:{webhook_id}")
print(str(e))
return None
return {"data": r.text}
@classmethod
def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
try:
@ -90,7 +66,7 @@ class Slack:
@classmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
integration = cls.get_integration(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
print(f"====> sending slack batch notification: {len(attachments)}")
@ -105,24 +81,12 @@ class Slack:
print(r.text)
@classmethod
def __share_to_slack(cls, tenant_id, integration_id, fallback, pretext, title, title_link, text):
integration = cls.__get(tenant_id=tenant_id, integration_id=integration_id)
def __share(cls, tenant_id, integration_id, attachement):
integration = cls.get_integration(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["slack integration not found"]}
r = requests.post(
url=integration["endpoint"],
json={
"attachments": [
{
"fallback": fallback,
"pretext": pretext,
"title": title,
"title_link": title_link,
"text": text,
"ts": datetime.now().timestamp()
}
]
})
attachement["ts"] = datetime.now().timestamp()
r = requests.post(url=integration["endpoint"], json={"attachments": [attachement]})
return r.text
@classmethod
@ -132,7 +96,10 @@ class Slack:
"title": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
@ -141,19 +108,18 @@ class Slack:
"title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def has_slack(cls, tenant_id):
integration = cls.__get(tenant_id=tenant_id)
return not (integration is None or len(integration) == 0)
@classmethod
def __get(cls, tenant_id, integration_id=None):
def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get(tenant_id=tenant_id, webhook_id=integration_id)
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.slack)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type="slack")
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]
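The refactor above collapses __share_to_slack's five keyword arguments into a single prepared attachment dict; __share now only stamps the "ts" field and posts it. A hedged sketch of what share_session hands over, with hypothetical values and the full key set assumed from the old __share_to_slack signature:

attachement = {"fallback": "user@example.com shared a session",
               "pretext": "user@example.com shared a session",
               "title": "https://app.example.com/1/session/42",
               "title_link": "https://app.example.com/1/session/42",
               "text": "please take a look"}
# Inside Slack.__share:
#   attachement["ts"] = datetime.now().timestamp()
#   requests.post(url=integration["endpoint"], json={"attachments": [attachement]})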


@ -1,15 +1,19 @@
import json
from typing import Union
from decouple import config
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import sessions, funnels, errors, issues
from chalicelib.utils import helper, pg_client
from chalicelib.core import sessions, funnels, errors, issues, metrics, click_maps, sessions_mobs
from chalicelib.utils import helper, pg_client, s3
from chalicelib.utils.TimeUTC import TimeUTC
PIE_CHART_GROUP = 5
def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __try_live(project_id, data: schemas.CreateCardSchema):
results = []
for i, s in enumerate(data.series):
s.filter.startDate = data.startTimestamp
@ -42,11 +46,11 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return results
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
def __is_funnel_chart(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.funnel
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __get_funnel_chart(project_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
return {
"stages": [],
@ -57,12 +61,12 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __is_errors_list(data):
def __is_errors_list(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.errors
and data.metric_of == schemas.MetricOfTable.errors
def __get_errors_list(project_id, user_id, data):
def __get_errors_list(project_id, user_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
return {
"total": 0,
@ -75,12 +79,12 @@ def __get_errors_list(project_id, user_id, data):
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_sessions_list(data):
def __is_sessions_list(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.sessions
and data.metric_of == schemas.MetricOfTable.sessions
def __get_sessions_list(project_id, user_id, data):
def __get_sessions_list(project_id, user_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
print("empty series")
return {
@ -94,14 +98,37 @@ def __get_sessions_list(project_id, user_id, data):
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
if __is_funnel_chart(data):
def __is_predefined(data: schemas.CreateCardSchema):
return data.is_template
def __is_click_map(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.click_map
def __get_click_map_chart(project_id, user_id, data: schemas.CreateCardSchema, include_mobs: bool = True):
if len(data.series) == 0:
return None
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
return click_maps.search_short_session(project_id=project_id, user_id=user_id,
data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()),
include_mobs=include_mobs)
def merged_live(project_id, data: schemas.CreateCardSchema, user_id=None):
if data.is_template:
return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict())
elif __is_funnel_chart(data):
return __get_funnel_chart(project_id=project_id, data=data)
elif __is_errors_list(data):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
elif __is_sessions_list(data):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
elif __is_click_map(data):
return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
elif len(data.series) == 0:
return []
series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
@ -113,69 +140,67 @@ def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id
return results
def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
schemas.CustomMetricSessionsPayloadSchema]) \
-> Union[schemas.CreateCustomMetricsSchema, None]:
def __merge_metric_with_data(metric: schemas.CreateCardSchema,
data: schemas.CardChartSchema) -> schemas.CreateCardSchema:
if data.series is not None and len(data.series) > 0:
metric["series"] = data.series
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
metric.series = data.series
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(
**{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
if len(data.filters) > 0 or len(data.events) > 0:
for s in metric.series:
if len(data.filters) > 0:
s.filter.filters += data.filters
if len(data.events) > 0:
s.filter.events += data.events
metric.limit = data.limit
metric.page = data.page
metric.startTimestamp = data.startTimestamp
metric.endTimestamp = data.endTimestamp
return metric
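The net effect of __merge_metric_with_data is easy to misread, so a hedged summary of the precedence it implements (field names as above):

# merged = CreateCardSchema(**{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
# 1. stored card fields win over request fields in the dict merge (metric comes last),
# 2. except series: a non-empty request series replaces the stored ones beforehand,
# 3. request-level filters/events are then appended to every series filter,
# 4. limit, page, startTimestamp and endTimestamp are always taken from the request.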
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
if metric is None:
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
def make_chart(project_id, user_id, data: schemas.CardChartSchema, metric: schemas.CreateCardSchema):
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
return merged_live(project_id=project_id, data=metric, user_id=user_id)
# if __is_funnel_chart(metric):
# return __get_funnel_chart(project_id=project_id, data=metric)
# elif __is_errors_list(metric):
# return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
#
# series_charts = __try_live(project_id=project_id, data=metric)
# if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
# return series_charts
# results = [{}] * len(series_charts[0])
# for i in range(len(results)):
# for j, series_chart in enumerate(series_charts):
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
# metric.series[j].name: series_chart[i]["count"]}
# return results
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
results = []
# is_click_map = False
# if __is_click_map(metric) and raw_metric.get("data") is not None:
# is_click_map = True
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
# if is_click_map:
# results.append(
# {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]})
# break
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -187,11 +212,12 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -203,7 +229,7 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSe
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if data.series is None:
return results
@ -212,50 +238,60 @@ def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadS
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
if len(data.filters) > 0:
s.filter.filters += data.filters
if len(data.events) > 0:
s.filter.events += data.events
results.append({"seriesId": None, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
def create(project_id, user_id, data: schemas.CreateCardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
_data = {}
session_data = None
if __is_click_map(data):
session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
data=data, include_mobs=False)
if session_data is not None:
session_data = json.dumps(session_data)
_data = {"session_data": session_data}
for i, s in enumerate(data.series):
for k in s.dict().keys():
_data[f"{k}_{i}"] = s.__getattribute__(k)
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
data.series = None
params = {"user_id": user_id, "project_id": project_id,
"default_config": json.dumps(data.config.dict()),
**data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;""", params)
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
params["default_config"] = json.dumps(data.default_config.dict())
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail, data)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s)
RETURNING metric_id"""
if len(data.series) > 0:
query = f"""WITH m AS ({query})
INSERT INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;"""
cur.execute(
query
)
query = cur.mogrify(query, params)
# print("-------")
# print(query)
# print("-------")
cur.execute(query)
r = cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema):
metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
series_ids = [r["seriesId"] for r in metric["series"]]
@ -267,7 +303,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of,
"metric_value": data.metric_value, "metric_format": data.metric_format,
"config": json.dumps(data.config.dict())}
"config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail}
for i, s in enumerate(data.series):
prefix = "u_"
if s.index is None:
@ -318,16 +354,33 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now()),
default_config = %(config)s
default_config = %(config)s,
thumbnail = %(thumbnail)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(query)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_all(project_id, user_id, include_series=False):
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"]
params = {"project_id": project_id, "user_id": user_id,
"offset": (data.page - 1) * data.limit,
"limit": data.limit, }
if data.mine_only:
constraints.append("user_id = %(user_id)s")
else:
constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
if data.shared_only:
constraints.append("is_public")
if data.query is not None and len(data.query) > 0:
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
sub_join = ""
if include_series:
@ -336,35 +389,32 @@ def get_all(project_id, user_id, include_series=False):
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (user_id = %(user_id)s OR metrics.is_public)
ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
{"project_id": project_id, "user_id": user_id}
)
)
query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
dashboards, owner_email, default_config AS config, thumbnail
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order.value}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()
if include_series:
for r in rows:
# r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
for s in r["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
else:
@ -375,6 +425,17 @@ def get_all(project_id, user_id, include_series=False):
return rows
def get_all(project_id, user_id):
default_search = schemas.SearchCardsSchema()
result = rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
while len(rows) == default_search.limit:
default_search.page += 1
rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
result += rows
return result
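get_all above drains search_all page by page, stopping at the first short page; note that when the row count is an exact multiple of the page size, one extra empty query runs before the loop exits. The same pattern as a generic, self-contained generator (fetch_page is a hypothetical callable returning one page of rows for a 1-based page number):

def iter_pages(fetch_page, limit):
    # Yield rows page by page until a short page signals the end.
    page = 1
    while True:
        rows = fetch_page(page)
        yield from rows
        if len(rows) < limit:
            break
        page += 1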
def delete(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -390,37 +451,40 @@ def delete(project_id, metric_id, user_id):
return {"state": "success"}
def get(metric_id, project_id, user_id, flatten=True):
def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
default_config AS config,series, dashboards, owner_email
{',data' if include_data else ''}
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))
AND metric_id = %(metric_id)s) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
if row is None:
return None
@ -432,39 +496,6 @@ def get(metric_id, project_id, user_id, flatten=True):
return helper.dict_to_camel_case(row)
def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
with pg_client.PostgresClient() as cur:
sub_query = ""
if include_dashboard:
sub_query = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
{sub_query}
WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL)
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -499,17 +530,18 @@ def change_state(project_id, metric_id, user_id, status):
AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "status": status, "user_id": user_id})
)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CustomMetricSessionsPayloadSchema
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -538,3 +570,81 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
elif __is_click_map(metric):
if raw_metric["data"]:
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=raw_metric["data"]["sessionId"])
mob_exists = False
for k in keys:
if s3.exists(bucket=config("sessions_bucket"), key=k):
mob_exists = True
break
if mob_exists:
raw_metric["data"]['domURL'] = sessions_mobs.get_urls(session_id=raw_metric["data"]["sessionId"],
project_id=project_id)
raw_metric["data"]['mobsUrl'] = sessions_mobs.get_urls_depercated(
session_id=raw_metric["data"]["sessionId"])
return raw_metric["data"]
return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric)
PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
    # **kwargs so an unknown key really no-ops instead of raising TypeError
    return PREDEFINED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)
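A hedged dispatch sketch for the table above: rendering a predefined card is a dictionary lookup plus a call, with every enum member mapped straight to a chalicelib.core.metrics function. The arguments below are hypothetical; in practice the data dict comes from the card payload's .dict() and is forwarded via **data.

chart = get_predefined_metric(key=schemas.MetricOfWebVitals.avg_cpu,
                              project_id=1,
                              data={"startTimestamp": 1677000000000,
                                    "endTimestamp": 1677086400000})
# Unknown keys fall through to the kwargs-absorbing default and return None.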


@ -1,48 +1,11 @@
import json
import schemas
from chalicelib.core import custom_metrics, metrics
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
# category name should be lower cased
CATEGORY_DESCRIPTION = {
'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
'custom': 'Previously created custom metrics by me and my team.',
'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
'performance': 'Optimize your apps performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
'resources': 'Find out which resources are missing and those that may be slowing your web app.'
}
def get_templates(project_id, user_id):
with pg_client.PostgresClient() as cur:
pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
FROM (SELECT * , default_config AS config
FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE deleted_at IS NULL
AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
) AS metrics
GROUP BY category
ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
{"project_id": project_id, "userId": user_id})
cur.execute(pg_query)
rows = cur.fetchall()
for r in rows:
r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
for w in r["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.list_to_camel_case(rows)
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur:
@ -87,13 +50,23 @@ def get_dashboard(project_id, user_id, dashboard_id):
pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
FROM dashboards
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
FROM (SELECT dashboard_widgets.*,
metrics.name, metrics.edited_at,metrics.metric_of,
metrics.view_type,metrics.thumbnail,metrics.metric_type,
metrics.metric_format,metrics.metric_value,metrics.default_config,
metric_series.series
FROM metrics
INNER JOIN dashboard_widgets USING (metric_id)
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
INNER JOIN dashboard_widgets USING (metric_id)
LEFT JOIN LATERAL (
SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
FROM (SELECT metric_series.name,
metric_series.index,
metric_series.metric_id,
metric_series.series_id,
metric_series.created_at
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL) AS metric_series
) AS metric_series ON (TRUE)
WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
AND metrics.deleted_at ISNULL
@ -113,6 +86,7 @@ def get_dashboard(project_id, user_id, dashboard_id):
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
w["config"]["col"] = w["default_config"]["col"]
w["config"]["row"] = w["default_config"]["row"]
w.pop("default_config")
for s in w["series"]:
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
return helper.dict_to_camel_case(row)
@ -140,17 +114,19 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
row = cur.fetchone()
offset = row["count"]
pg_query = f"""UPDATE dashboards
SET name = %(name)s,
SET name = %(name)s,
description= %(description)s
{", is_public = %(is_public)s" if data.is_public is not None else ""}
{", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
WHERE dashboards.project_id = %(projectId)s
WHERE dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public)"""
AND (dashboards.user_id = %(userId)s OR is_public)
RETURNING dashboard_id,name,description,is_public,created_at;"""
if data.metrics is not None and len(data.metrics) > 0:
pg_query = f"""WITH dash AS ({pg_query})
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};"""
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])}
RETURNING dash.*;"""
for i, m in enumerate(data.metrics):
params[f"metric_id_{i}"] = m
# params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
@ -160,8 +136,10 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
params[f"config_{i}"] = json.dumps({"position": i + offset})
cur.execute(cur.mogrify(pg_query, params))
return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
row = cur.fetchone()
if row:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
def get_widget(project_id, user_id, dashboard_id, widget_id):
@ -243,86 +221,18 @@ def pin_dashboard(project_id, user_id, dashboard_id):
return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCardSchema):
metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
}
def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
return PREDEFINED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)
def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
include_dashboard=False)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template and metric.predefined_key is None:
return None
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
metric=raw_metric)
def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
data=data, metric=raw_metric)
# def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema):
# raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
# if raw_metric is None:
# return None
# metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate(**raw_metric)
# if metric.is_template:
# return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
# else:
# return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
# data=data, metric=raw_metric)
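
The template cards above resolve through a plain dict dispatch with a no-op fallback. A minimal standalone sketch of the pattern (metric names and payloads here are placeholders, not the real chalicelib metrics):

def count_sessions(project_id, **data):
    # stand-in for metrics.get_processed_sessions
    return {"projectId": project_id, "count": 0}

DISPATCH = {"countSessions": count_sessions}

def run_predefined(key, project_id, data):
    # the fallback must accept **kwargs because the call site passes keyword
    # arguments; a bare `lambda *args: None` would raise TypeError for unknown keys
    return DISPATCH.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)

print(run_predefined("countSessions", 1, {}))  # {'projectId': 1, 'count': 0}
print(run_predefined("noSuchKey", 1, {}))      # None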

View file

@ -2,6 +2,7 @@ import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@ -277,7 +278,7 @@ def get_details(project_id, error_id, user_id, **data):
status = cur.fetchone()
if status is not None:
row["stack"] = format_first_stack_frame(status).pop("stack")
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
row["status"] = status.pop("status")
row["parent_error_id"] = status.pop("parent_error_id")
row["favorite"] = status.pop("favorite")
@ -469,9 +470,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = schemas.SortOrderType.desc
order = schemas.SortOrderType.desc.value
if data.order is not None:
order = data.order
order = data.order.value
extra_join = ""
params = {
@ -721,19 +722,6 @@ def __status_rank(status):
}.get(status)
def format_first_stack_frame(error):
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
for s in error["stack"]:
for c in s.get("context", []):
for sci, sc in enumerate(c):
if isinstance(sc, str) and len(sc) > 1000:
c[sci] = sc[:1000]
# convert bytes to string:
if isinstance(s["filename"], bytes):
s["filename"] = s["filename"].decode("utf-8")
return error
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(

View file

@ -1,16 +1,15 @@
import schemas
from chalicelib.core import issues
from chalicelib.core import metadata
from chalicelib.core import sessions_metas
from typing import Optional
import schemas
from chalicelib.core import autocomplete
from chalicelib.core import issues
from chalicelib.core import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
from chalicelib.core import autocomplete
def get_customs_by_sessionId2_pg(session_id, project_id):
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
@ -53,50 +52,53 @@ def __get_grouped_clickrage(rows, session_id, project_id):
return rows
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = []
if event_type is None or event_type == schemas.EventType.click:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input:
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.location:
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows
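
Usage sketch for the new optional filter (hypothetical ids; assumes this module's context): omitting event_type keeps the old behavior of fetching clicks, inputs and pages together, while passing a specific schemas.EventType runs only that event family's query.

all_events = get_by_session_id(session_id=123456, project_id=1, group_clickrage=True)
clicks_only = get_by_session_id(session_id=123456, project_id=1,
                                event_type=schemas.EventType.click)
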
class event_type:
class EventType:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
@ -118,46 +120,46 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.LOCATION.ui_type)),
event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.REQUEST.ui_type)),
event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
query=autocomplete.__generic_query(
typename=event_type.GRAPHQL.ui_type)),
event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.STATEACTION.ui_type)),
event_type.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors,
query=None),
event_type.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_pg_metadata,
query=None),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
query=autocomplete.__generic_query(
typename=event_type.CLICK_IOS.ui_type)),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
query=autocomplete.__generic_query(
typename=event_type.INPUT_IOS.ui_type)),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
query=autocomplete.__generic_query(
typename=event_type.VIEW_IOS.ui_type)),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
typename=EventType.LOCATION.ui_type)),
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
query=autocomplete.__generic_query(
typename=EventType.REQUEST.ui_type)),
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
query=autocomplete.__generic_query(
typename=EventType.GRAPHQL.ui_type)),
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.CUSTOM_IOS.ui_type)),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=event_type.REQUEST_IOS.ui_type)),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_pg_errors_ios,
query=None),
typename=EventType.STATEACTION.ui_type)),
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
query=None),
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
query=None),
# IOS
EventType.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_IOS),
query=autocomplete.__generic_query(
typename=EventType.CLICK_IOS.ui_type)),
EventType.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_IOS),
query=autocomplete.__generic_query(
typename=EventType.INPUT_IOS.ui_type)),
EventType.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_IOS),
query=autocomplete.__generic_query(
typename=EventType.VIEW_IOS.ui_type)),
EventType.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_IOS),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM_IOS.ui_type)),
EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=EventType.REQUEST_IOS.ui_type)),
EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_errors_ios,
query=None),
}
@ -165,7 +167,7 @@ def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
@ -182,11 +184,9 @@ def search(text, event_type, project_id, source, key):
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \

View file

@ -7,8 +7,8 @@ def get_customs_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""\
SELECT
c.*,
'{events.event_type.CUSTOM_IOS.ui_type}' AS type
FROM {events.event_type.CUSTOM_IOS.table} AS c
'{events.EventType.CUSTOM_IOS.ui_type}' AS type
FROM {events.EventType.CUSTOM_IOS.table} AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
@ -23,8 +23,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
c.*,
'{events.event_type.CLICK_IOS.ui_type}' AS type
FROM {events.event_type.CLICK_IOS.table} AS c
'{events.EventType.CLICK_IOS.ui_type}' AS type
FROM {events.EventType.CLICK_IOS.table} AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
@ -35,8 +35,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
i.*,
'{events.event_type.INPUT_IOS.ui_type}' AS type
FROM {events.event_type.INPUT_IOS.table} AS i
'{events.EventType.INPUT_IOS.ui_type}' AS type
FROM {events.EventType.INPUT_IOS.table} AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
@ -46,8 +46,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
v.*,
'{events.event_type.VIEW_IOS.ui_type}' AS type
FROM {events.event_type.VIEW_IOS.table} AS v
'{events.EventType.VIEW_IOS.ui_type}' AS type
FROM {events.EventType.VIEW_IOS.table} AS v
WHERE
v.session_id = %(session_id)s
ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
@ -61,7 +61,7 @@ def get_crashes_by_session_id(session_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""
SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
FROM {events.event_type.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id)
FROM {events.EventType.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE
cr.session_id = %(session_id)s
ORDER BY timestamp;""", {"session_id": session_id}))

View file

@ -1,16 +1,9 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
from chalicelib.core import significance, sessions
from chalicelib.utils import dev
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
from chalicelib.core import significance
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
@ -25,10 +18,6 @@ def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
@ -39,220 +28,15 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
is_any = sh.isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
return events
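
The loop above keeps a stage with an empty value list only when its operator is an "any" operator. A self-contained sketch of that rule (the real check lives in chalicelib.utils.sql_helper.isAny_opreator; the operator spellings below are an assumption):

def is_any_operator(operator):
    return operator in ("isAny", "onAny")  # assumed spellings

def keep_stage(operator, value):
    value = value if isinstance(value, list) else [value]
    return is_any_operator(operator) or len(value) > 0

print(keep_stage("is", []))     # False: no values, not an "any" operator -> dropped
print(keep_stage("isAny", []))  # True: "any" operators need no values -> kept
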
def __transform_old_funnels(events):
for e in events:
if not isinstance(e.get("value"), list):
e["value"] = [e["value"]]
return events
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""",
{"user_id": user_id, "project_id": project_id, "name": name,
"filter": json.dumps(filter.dict()),
"is_public": is_public})
cur.execute(
query
)
r = cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
return {"data": r}
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
s_query = []
if filter is not None:
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
s_query.append("filter = %(filter)s::jsonb")
if name is not None and len(name) > 0:
s_query.append("name = %(name)s")
if is_public is not None:
s_query.append("is_public = %(is_public)s")
if len(s_query) == 0:
return {"errors": ["Nothing to update"]}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
UPDATE public.funnels
SET {" , ".join(s_query)}
WHERE funnel_id=%(funnel_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
"project_id": project_id})
# print("--------------------")
# print(query)
# print("--------------------")
cur.execute(
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.funnels
WHERE project_id = %(project_id)s
AND funnels.deleted_at IS NULL
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
project_id=project_id, user_id=None, count_only=True)
row["sessionsCount"] = counts["countSessions"]
row["usersCount"] = counts["countUsers"]
filter_clone = dict(row["filter"])
overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
row["stages"] = overview["stages"]
row.pop("filter")
row["stagesCount"] = len(row["stages"])
# TODO: ask david to count it alone
row["criticalIssuesCount"] = overview["criticalIssuesCount"]
row["missedConversions"] = 0 if len(row["stages"]) < 2 \
else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
row["filter"] = helper.old_search_payload_to_flat(filter_clone)
return rows
def get_possible_issue_types(project_id):
return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
['click_rage', 'dead_click', 'excessive_scrolling',
'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
'js_error']]
def get_start_end_time(filter_d, range_value, start_date, end_date):
if start_date is not None and end_date is not None:
filter_d["startDate"], filter_d["endDate"] = start_date, end_date
elif range_value is not None and len(range_value) > 0:
filter_d["rangeValue"] = range_value
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
else:
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
def delete(project_id, funnel_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.funnels
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
)
return {"data": {"state": "success"}}
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), project_id=project_id,
user_id=user_id)
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
start_date=data.startDate, end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
return sessions.search_sessions(data=data, project_id=project_id,
user_id=user_id)
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
@ -271,37 +55,8 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSer
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return {"data": {
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
}}
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) < 2:
@ -311,62 +66,3 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFil
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""\
SELECT
*
FROM public.funnels
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
)
)
f = helper.dict_to_camel_case(cur.fetchone())
if f is None:
return None
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date
data.endDate = data.endDate if data.endDate is not None else end_date
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
for i in issues:
if i.get("issueId", "") == issue_id:
issue = i
break
return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
data=data) if issue is not None else {"total": 0, "sessions": []},
# "stages": helper.list_to_camel_case(insights),
# "totalDropDueToIssues": total_drop_due_to_issues,
"issue": issue}

View file

@ -1,26 +1,74 @@
from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def get_by_url(project_id, data):
args = {"startDate": data.get('startDate', TimeUTC.now(delta_days=-30)),
"endDate": data.get('endDate', TimeUTC.now()),
"project_id": project_id, "url": data["url"]}
def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
args = {"startDate": data.startDate, "endDate": data.endDate,
"project_id": project_id, "url": data.url}
constraints = ["sessions.project_id = %(project_id)s",
"(url = %(url)s OR path= %(url)s)",
"clicks.timestamp >= %(startDate)s",
"clicks.timestamp <= %(endDate)s",
"start_ts >= %(startDate)s",
"start_ts <= %(endDate)s",
"duration IS NOT NULL"]
query_from = "events.clicks INNER JOIN sessions USING (session_id)"
q_count = "count(1) AS count"
has_click_rage_filter = False
if len(data.filters) > 0:
for i, f in enumerate(data.filters):
if f.type == schemas.FilterType.issue and len(f.value) > 0:
has_click_rage_filter = True
q_count = "max(real_count) AS count,TRUE AS click_rage"
query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
INNER JOIN issues AS mis USING (issue_id)
INNER JOIN LATERAL (
SELECT COUNT(1) AS real_count
FROM events.clicks AS sc
INNER JOIN sessions as ss USING (session_id)
WHERE ss.project_id = %(project_id)s
AND (sc.url = %(url)s OR sc.path = %(url)s)
AND sc.timestamp >= %(startDate)s
AND sc.timestamp <= %(endDate)s
AND ss.start_ts >= %(startDate)s
AND ss.start_ts <= %(endDate)s
AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
constraints += ["mis.project_id = %(project_id)s",
"issues.timestamp >= %(startDate)s",
"issues.timestamp <= %(endDate)s"]
f_k = f"issue_value{i}"
args = {**args, **sh.multi_values(f.value, value_key=f_k)}
constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
f.value, value_key=f_k))
constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
f.value, value_key=f_k))
if len(f.filters) > 0:
for j, sf in enumerate(f.filters):
f_k = f"issue_svalue{i}{j}"
args = {**args, **sh.multi_values(sf.value, value_key=f_k)}
if sf.type == schemas.IssueFilterType._on_selector and len(sf.value) > 0:
constraints.append(sh.multi_conditions(f"clicks.selector = %({f_k})s",
sf.value, value_key=f_k))
if data.click_rage and not has_click_rage_filter:
constraints.append("""(issues.session_id IS NULL
OR (issues.timestamp >= %(startDate)s
AND issues.timestamp <= %(endDate)s
AND mis.project_id = %(project_id)s
AND mis.type = 'click_rage'))""")
q_count += ",COALESCE(bool_or(mis.issue_id IS NOT NULL), FALSE) AS click_rage"
query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
LEFT JOIN issues AS mis USING (issue_id)"""
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT selector, count(1) AS count
FROM events.clicks
INNER JOIN sessions USING (session_id)
WHERE project_id = %(project_id)s
AND url = %(url)s
AND timestamp >= %(startDate)s
AND timestamp <= %(endDate)s
AND start_ts >= %(startDate)s
AND start_ts <= %(endDate)s
AND duration IS NOT NULL
GROUP BY selector;""",
args)
query = cur.mogrify(f"""SELECT selector, {q_count}
FROM {query_from}
WHERE {" AND ".join(constraints)}
GROUP BY selector
LIMIT 500;""", args)
# print("---------")
# print(query.decode('UTF-8'))
# print("---------")
try:
cur.execute(query)
except Exception as err:
@ -31,4 +79,4 @@ def get_by_url(project_id, data):
print("--------------------")
raise err
rows = cur.fetchall()
return helper.dict_to_camel_case(rows)
return helper.list_to_camel_case(rows)
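
The issue filters above are expanded through sh.multi_values/sh.multi_conditions. A hedged re-implementation of what the expansion appears to produce (the real helpers live in chalicelib.utils.sql_helper and may differ in detail):

def multi_values(values, value_key="value"):
    # one bind parameter per value: {"issue_value0_0": "click_rage", ...}
    return {f"{value_key}_{i}": v for i, v in enumerate(values)}

def multi_conditions(condition, values, value_key="value"):
    # OR together one copy of the condition per value
    parts = [condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
             for i in range(len(values))]
    return "(" + " OR ".join(parts) + ")"

values = ["click_rage", "dead_click"]
print(multi_values(values, value_key="issue_value0"))
print(multi_conditions("mis.type = %(issue_value0)s", values, value_key="issue_value0"))
# (mis.type = %(issue_value0_0)s OR mis.type = %(issue_value0_1)s)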

View file

@ -24,8 +24,7 @@ class GitHubIntegration(integration_base.BaseIntegration):
integration = self.get()
if integration is None:
return None
token = "*" * (len(integration["token"]) - 4) + integration["token"][-4:]
return {"token": token, "provider": self.provider.lower()}
return {"token": helper.obfuscate(text=integration["token"]), "provider": self.provider.lower()}
def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur:
@ -40,12 +39,14 @@ class GitHubIntegration(integration_base.BaseIntegration):
**changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"])
return w
def _add(self, data):
pass
def add(self, token):
def add(self, token, obfuscate=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
@ -56,6 +57,8 @@ class GitHubIntegration(integration_base.BaseIntegration):
"token": token})
)
w = helper.dict_to_camel_case(cur.fetchone())
if w and w.get("token") and obfuscate:
w["token"] = helper.obfuscate(w["token"])
return w
# TODO: make a revoke token call
@ -81,4 +84,4 @@ class GitHubIntegration(integration_base.BaseIntegration):
obfuscate=True
)
else:
return self.add(token=data["token"])
return self.add(token=data["token"], obfuscate=True)
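
helper.obfuscate replaces the inline masking that get() used to do. A minimal sketch matching the replaced line's behavior (keep the last four characters, star the rest); the real helper may handle edge cases differently:

def obfuscate(text, keep=4):
    if not text:
        return text
    return "*" * max(len(text) - keep, 0) + text[-keep:]

print(obfuscate("ghp_abcdef12345678"))  # **************5678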

View file

@ -9,49 +9,52 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.github},
AND provider = 'github')) AS {schemas.IntegrationType.github.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira},
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag},
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch},
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.datadog},
AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic},
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar},
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.sentry},
AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver},
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic},
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch},
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack')) AS {schemas.IntegrationType.slack};""",
WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.slack.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()
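
The switch to .value in these f-strings pins the column aliases to the plain enum strings. A short illustration (IntegrationType here is a stand-in for the schema enum):

from enum import Enum

class IntegrationType(str, Enum):
    github = "github"

# f"{IntegrationType.github}" may render as "IntegrationType.github" on some
# Python versions (format() semantics for str-mixin enums changed in 3.11),
# while .value is always the plain string the SQL alias needs:
assert IntegrationType.github.value == "github"
print(f"AS {IntegrationType.github.value}")  # AS github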

View file

@ -1,4 +1,8 @@
import re
from typing import Optional
from fastapi import HTTPException
from starlette import status
from chalicelib.core import projects
from chalicelib.utils import pg_client
@ -6,21 +10,37 @@ from chalicelib.utils import pg_client
MAX_INDEXES = 10
def _get_column_names():
def column_names():
return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]
def __exists_by_name(project_id: int, name: str, exclude_index: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
constraints = column_names()
if exclude_index:
del constraints[exclude_index - 1]
for i in range(len(constraints)):
constraints[i] += " ILIKE %(name)s"
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
AND ({" OR ".join(constraints)})) AS exists;""",
{"project_id": project_id, "name": name})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def get(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
{",".join(_get_column_names())}
FROM public.projects
WHERE project_id = %(project_id)s AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query)
metas = cur.fetchone()
results = []
if metas is not None:
@ -34,15 +54,12 @@ def get_batch(project_ids):
if project_ids is None or len(project_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
project_id, {",".join(_get_column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", {"project_ids": tuple(project_ids)})
)
query = cur.mogrify(f"""SELECT project_id, {",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query=query)
full_metas = cur.fetchall()
results = {}
if full_metas is not None and len(full_metas) > 0:
@ -84,17 +101,21 @@ def __edit(project_id, col_index, colname, new_name):
with pg_client.PostgresClient() as cur:
if old_metas[col_index]["key"] != new_name:
cur.execute(cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s
WHERE project_id = %(project_id)s AND deleted_at ISNULL
RETURNING {colname};""",
{"project_id": project_id, "value": new_name}))
query = cur.mogrify(f"""UPDATE public.projects
SET {colname} = %(value)s
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING {colname};""",
{"project_id": project_id, "value": new_name})
cur.execute(query=query)
new_name = cur.fetchone()[colname]
old_metas[col_index]["key"] = new_name
return {"data": old_metas[col_index]}
def edit(tenant_id, project_id, index: int, new_name: str):
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=index):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="name already exists.")
return __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)
@ -127,12 +148,16 @@ def add(tenant_id, project_id, new_name):
index = __get_available_index(project_id=project_id)
if index < 1:
return {"errors": ["maximum allowed metadata reached"]}
if __exists_by_name(project_id=project_id, name=new_name, exclude_index=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="name already exists.")
with pg_client.PostgresClient() as cur:
colname = index_to_colname(index)
cur.execute(
cur.mogrify(
f"""UPDATE public.projects SET {colname}= %(key)s WHERE project_id =%(project_id)s RETURNING {colname};""",
{"key": new_name, "project_id": project_id}))
query = cur.mogrify(f"""UPDATE public.projects
SET {colname}= %(key)s
WHERE project_id =%(project_id)s
RETURNING {colname};""",
{"key": new_name, "project_id": project_id})
cur.execute(query=query)
col_val = cur.fetchone()[colname]
return {"data": {"key": col_val, "index": index}}
@ -140,21 +165,17 @@ def add(tenant_id, project_id, new_name):
def search(tenant_id, project_id, key, value):
value = value + "%"
s_query = []
for f in _get_column_names():
for f in column_names():
s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
{",".join(s_query)}
FROM public.projects
WHERE
project_id = %(project_id)s AND deleted_at ISNULL
LIMIT 1;""",
{"key": key, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(s_query)}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""",
{"key": key, "project_id": project_id})
cur.execute(query=query)
all_metas = cur.fetchone()
key = None
for c in all_metas:
@ -163,17 +184,13 @@ def search(tenant_id, project_id, key, value):
break
if key is None:
return {"errors": ["key does not exist"]}
cur.execute(
cur.mogrify(
f"""\
SELECT
DISTINCT "{key}" AS "{key}"
FROM public.sessions
{f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
ORDER BY "{key}"
LIMIT 20;""",
{"value": value, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT DISTINCT "{key}" AS "{key}"
FROM public.sessions
{f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
ORDER BY "{key}"
LIMIT 20;""",
{"value": value, "project_id": project_id})
cur.execute(query=query)
value = cur.fetchall()
return {"data": [k[key] for k in value]}
@ -189,14 +206,12 @@ def get_by_session_id(project_id, session_id):
return []
keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
select {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
)
query = cur.mogrify(f"""SELECT {",".join(keys.keys())}
FROM public.sessions
WHERE project_id= %(project_id)s
AND session_id=%(session_id)s;""",
{"session_id": session_id, "project_id": project_id})
cur.execute(query=query)
session_metas = cur.fetchall()
results = []
for m in session_metas:
@ -211,14 +226,11 @@ def get_keys_by_projects(project_ids):
if project_ids is None or len(project_ids) == 0:
return {}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""\
SELECT
project_id,
{",".join(_get_column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
query = cur.mogrify(f"""SELECT project_id,{",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})
cur.execute(query)
rows = cur.fetchall()

View file

@ -1610,7 +1610,7 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"status_code": 4, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows)
rows = __nested_array_to_dict_array(rows, key="host")
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
@ -1618,7 +1618,7 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
params["status_code"] = 5
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows)
rows = __nested_array_to_dict_array(rows, key="host")
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
result["5xx"] = rows
@ -1658,7 +1658,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.
"status_code": status, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows)
rows = __nested_array_to_dict_array(rows, key="host")
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)

View file

@ -1,4 +1,8 @@
import json
from typing import Optional
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import users
@ -6,6 +10,20 @@ from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.projects
WHERE deleted_at IS NULL
AND name ILIKE %(name)s
{"AND project_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id})
cur.execute(query=query)
row = cur.fetchone()
return row["exists"]
def __update(tenant_id, project_id, changes):
if len(changes.keys()) == 0:
return None
@ -14,29 +32,23 @@ def __update(tenant_id, project_id, changes):
for key in changes.keys():
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
UPDATE public.projects
SET
{" ,".join(sub_query)}
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
)
query = cur.mogrify(f"""UPDATE public.projects
SET {" ,".join(sub_query)}
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING project_id,name,gdpr;""",
{"project_id": project_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def __create(tenant_id, name):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.projects (name, active)
VALUES (%(name)s,TRUE)
RETURNING project_id;""",
{"name": name})
)
query = cur.mogrify(f"""INSERT INTO public.projects (name, active)
VALUES (%(name)s,TRUE)
RETURNING project_id;""",
{"name": name})
cur.execute(query=query)
project_id = cur.fetchone()["project_id"]
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@ -66,8 +78,8 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
LIMIT 1) AS stack_integrations ON TRUE"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at
{extra_projection}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
created_at {extra_projection}
FROM public.projects AS s
{extra_join}
WHERE s.deleted_at IS NULL
@ -79,6 +91,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
u_values = []
params = {}
for i, r in enumerate(rows):
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if r["first_recorded_session_at"] is None:
u_values.append(f"(%(project_id_{i})s,to_timestamp(%(first_recorded_{i})s/1000))")
params[f"project_id_{i}"] = r["project_id"]
@ -91,7 +104,9 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded)
WHERE projects.project_id=u.project_id;""", params)
cur.execute(query)
else:
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
@ -118,49 +133,53 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_id,
s.project_key,
s.name,
s.save_request_payloads
{extra_select}
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_id": project_id})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT
s.project_key,
s.name
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.project_key =%(project_key)s
AND s.deleted_at IS NULL
LIMIT 1;""",
extra_select = ""
if include_last_session:
extra_select += """,(SELECT max(ss.start_ts)
FROM public.sessions AS ss
WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
if include_gdpr:
extra_select += ",s.gdpr"
query = cur.mogrify(f"""SELECT s.project_key,
s.name
{extra_select}
FROM public.projects AS s
WHERE s.project_key =%(project_key)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_key": project_key})
cur.execute(
query=query
)
cur.execute(query=query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if __exists_by_name(name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="name already exists.")
if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
@ -169,6 +188,8 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
if __exists_by_name(name=data.name, exclude_id=project_id):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="name already exists.")
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -182,95 +203,77 @@ def delete(tenant_id, user_id, project_id):
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET
deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE
project_id = %(project_id)s;""",
{"project_id": project_id})
)
query = cur.mogrify("""UPDATE public.projects
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE project_id = %(project_id)s;""",
{"project_id": project_id})
cur.execute(query=query)
return {"data": {"state": "success"}}
def count_by_tenant(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute("""\
SELECT
count(s.project_id)
FROM public.projects AS s
where s.deleted_at IS NULL;""")
return cur.fetchone()["count"]
def get_gdpr(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
gdpr
FROM public.projects AS s
where s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
)
return cur.fetchone()["gdpr"]
query = cur.mogrify("""SELECT gdpr
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
cur.execute(query=query)
row = cur.fetchone()["gdpr"]
row["projectId"] = project_id
return row
def edit_gdpr(project_id, gdpr):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET
gdpr = gdpr|| %(gdpr)s
WHERE
project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
)
return cur.fetchone()["gdpr"]
query = cur.mogrify("""UPDATE public.projects
SET gdpr = gdpr|| %(gdpr)s
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
RETURNING gdpr;""",
{"project_id": project_id, "gdpr": json.dumps(gdpr)})
cur.execute(query=query)
row = cur.fetchone()
if not row:
return {"errors": ["something went wrong"]}
row = row["gdpr"]
row["projectId"] = project_id
return row
def get_internal_project_id(project_key):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_id
FROM public.projects
where project_key =%(project_key)s AND deleted_at ISNULL;""",
{"project_key": project_key})
)
query = cur.mogrify("""SELECT project_id
FROM public.projects
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",
{"project_key": project_key})
cur.execute(query=query)
row = cur.fetchone()
return row["project_id"] if row else None
def get_project_key(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT project_key
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT project_key
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
project = cur.fetchone()
return project["project_key"] if project is not None else None
def get_capture_status(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
query = cur.mogrify("""SELECT sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
@ -285,22 +288,22 @@ def update_capture_status(project_id, changes):
if changes.get("captureAll"):
sample_rate = 100
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
)
query = cur.mogrify("""UPDATE public.projects
SET sample_rate= %(sample_rate)s
WHERE project_id =%(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "sample_rate": sample_rate})
cur.execute(query=query)
return changes
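A short usage sketch of update_capture_status: a truthy captureAll forces the persisted sample_rate to 100 (the rate otherwise comes from the payload, handled in the unchanged lines above this hunk), and the changes dict is echoed back untouched:
# Hypothetical call; payload keys follow the captureAll check above:
changes = {"captureAll": True, "rate": 30}
update_capture_status(project_id=1, changes=changes)
# persisted sample_rate: 100; return value: the original changes dict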
def get_projects_ids(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(f"""SELECT s.project_id
FROM public.projects AS s
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;""")
query = f"""SELECT s.project_id
FROM public.projects AS s
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;"""
cur.execute(query=query)
rows = cur.fetchall()
return [r["project_id"] for r in rows]


@ -2,9 +2,11 @@ from typing import List
import schemas
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \
sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
sessions_devtool, sessions_notes
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper, metrics_helper
from chalicelib.utils import sql_helper as sh
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
@ -60,7 +62,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
s.session_id::text AS session_id,
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
WHERE s.project_id = %(project_id)s
AND s.session_id = %(session_id)s;""",
@ -84,16 +86,16 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id)
@ -114,67 +116,6 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
return None
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def __reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def __get_sql_operator_multiple(op: schemas.SearchEventOperator):
return " IN " if op not in [schemas.SearchEventOperator._is_not, schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains] else " NOT IN "
def __get_sql_value_multiple(values):
if isinstance(values, tuple):
return values
return tuple(values) if isinstance(values, list) else (values,)
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
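Taken together, the two helpers above turn one condition template plus a list of values into an OR-joined (or AND-joined, when is_not is set) SQL group and the matching placeholder dict. A worked sketch:
# Worked example of _multiple_conditions / _multiple_values:
cond = _multiple_conditions("s.user_id = %(value)s", ["a", "b"], value_key="value")
# cond -> "(s.user_id = %(value_0)s OR s.user_id = %(value_1)s)"
vals = _multiple_values(["a", "b"], value_key="value")
# vals -> {"value_0": "a", "value_1": "b"}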
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
# This function executes the query and returns the result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
@ -210,9 +151,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.upper()
data.order = data.order.value
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
@ -245,7 +186,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.value
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@ -304,13 +247,13 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.TableMetricOfType, metric_value: List):
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.TableMetricOfType.visited_url:
if metric_of == schemas.MetricOfTable.visited_url:
extra_event = "events.pages"
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
@ -353,18 +296,19 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
else:
sessions = cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table:
if isinstance(metric_of, schemas.TableMetricOfType):
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = ""
extra_where = ""
pre_query = ""
if metric_of == schemas.TableMetricOfType.user_country:
distinct_on = "s.session_id"
if metric_of == schemas.MetricOfTable.user_country:
main_col = "user_country"
elif metric_of == schemas.TableMetricOfType.user_device:
elif metric_of == schemas.MetricOfTable.user_device:
main_col = "user_device"
elif metric_of == schemas.TableMetricOfType.user_browser:
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
elif metric_of == schemas.TableMetricOfType.issues:
elif metric_of == schemas.MetricOfTable.issues:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -374,16 +318,17 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
elif metric_of == schemas.MetricOfTable.visited_url:
main_col = "path"
extra_col = ", path"
distinct_on += ",path"
main_query = cur.mogrify(f"""{pre_query}
SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values
FROM (SELECT {main_col} AS name,
count(full_sessions) AS session_count,
count(DISTINCT session_id) AS session_count,
ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
FROM (SELECT *
FROM (SELECT DISTINCT ON(s.session_id) s.session_id, s.user_uuid,
FROM (SELECT DISTINCT ON({distinct_on}) s.session_id, s.user_uuid,
s.user_id, s.user_os,
s.user_browser, s.user_device,
s.user_device_type, s.user_country, s.issue_types{extra_col}
@ -420,7 +365,8 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
# this function generates the query and returns the generated query with the dict of query arguments
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
"projectId": project_id, "userId": user_id}
@ -438,15 +384,15 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
if __is_negation_operator(f.operator):
if sh.is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if is_any:
@ -454,9 +400,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_browser IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
if is_any:
@ -464,9 +411,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_os IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
if is_any:
@ -474,9 +421,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_device IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
if is_any:
@ -484,9 +431,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_country IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
if is_any:
@ -497,11 +445,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_source IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
if is_any:
extra_constraints.append('s.utm_medium IS NOT NULL')
@ -511,11 +459,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_medium IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
if is_any:
extra_constraints.append('s.utm_campaign IS NOT NULL')
@ -525,11 +473,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_campaign IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
@ -546,8 +494,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_constraints.append('s.base_referrer IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -561,11 +510,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
extra_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
@ -577,9 +526,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
@ -590,11 +541,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_anonymous_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
extra_constraints.append('s.rev_id IS NOT NULL')
@ -604,40 +555,58 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.rev_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
# op = __get_sql_operator(f.operator)
# op = sh.get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
if is_any:
extra_constraints.append("array_length(s.issue_types, 1) > 0")
ss_constraints.append("array_length(ms.issue_types, 1) > 0")
else:
extra_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
# search sessions with click_rage on a specific selector
if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value:
for j, sf in enumerate(f.filters):
if sf.operator == schemas.IssueFilterOperator._on_selector:
f_k = f"f_value{i}_{j}"
full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)}
extra_constraints += ["mc.timestamp>=%(startDate)s",
"mc.timestamp<=%(endDate)s",
"mis.type='click_rage'",
sh.multi_conditions(f"mc.selector=%({f_k})s",
sf.value, is_not=is_not,
value_key=f_k)]
extra_from += """INNER JOIN events.clicks AS mc USING(session_id)
INNER JOIN events_common.issues USING (session_id,timestamp)
INNER JOIN public.issues AS mis USING (issue_id)\n"""
elif filter_type == schemas.FilterType.events_count:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
@ -649,16 +618,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
events_joiner = " UNION " if or_events else " INNER JOIN LATERAL "
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
op = sh.get_sql_operator(event.operator)
is_not = False
if __is_negation_operator(event.operator):
if sh.is_negation_operator(event.operator):
is_not = True
op = __reverse_sql_operator(op)
op = sh.reverse_sql_operator(op)
if event_index == 0 or or_events:
event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)"
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
@ -678,116 +647,120 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if event.type != schemas.PerformanceEventType.time_between_events:
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.event_type.CLICK.ui_type:
event_from = event_from % f"{events.event_type.CLICK.table} AS main "
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
if not is_any:
if event.operator == schemas.ClickEventExtraOperator._on_selector:
event_where.append(
sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
else:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.INPUT.ui_type:
event_from = event_from % f"{events.EventType.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.INPUT.ui_type:
event_from = event_from % f"{events.event_type.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
elif event_type == events.event_type.LOCATION.ui_type:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
elif event_type == events.EventType.LOCATION.ui_type:
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM.table} AS main "
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.REQUEST.ui_type:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
# elif event_type == events.event_type.GRAPHQL.ui_type:
# event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
# value_key=e_k))
elif event_type == events.event_type.STATEACTION.ui_type:
event_from = event_from % f"{events.event_type.STATEACTION.table} AS main "
elif event_type == events.EventType.STATEACTION.ui_type:
event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR.ui_type:
event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR.ui_type:
event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
event.source = list(set(event.source))
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
# ----- IOS
elif event_type == events.event_type.CLICK_IOS.ui_type:
event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main "
elif event_type == events.EventType.CLICK_IOS.ui_type:
event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.INPUT_IOS.ui_type:
event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main "
elif event_type == events.EventType.INPUT_IOS.ui_type:
event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, f"custom{i}")}
elif event_type == events.event_type.VIEW_IOS.ui_type:
event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main "
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
elif event_type == events.EventType.VIEW_IOS.ui_type:
event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR_IOS.ui_type:
event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR_IOS.ui_type:
event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
elif event_type == schemas.PerformanceEventType.fetch_failed:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
col = performance_event.get_col(event_type)
colname = col["column"]
event_where.append(f"main.{colname} = FALSE")
@ -801,7 +774,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# colname = col["column"]
# tname = "main"
# e_k += "_custom"
# full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
# full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
# event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
# _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
@ -811,7 +784,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
]:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
col = performance_event.get_col(event_type)
colname = col["column"]
tname = "main"
@ -822,16 +795,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
f"{tname}.timestamp <= %(endDate)s"]
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) "
event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
if not isinstance(event.value[0].value, list):
event.value[0].value = [event.value[0].value]
if not isinstance(event.value[1].value, list):
@ -843,98 +816,99 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
e_k1 = e_k + "_e1"
e_k2 = e_k + "_e2"
full_args = {**full_args,
**_multiple_values(event.value[0].value, value_key=e_k1),
**_multiple_values(event.value[1].value, value_key=e_k2)}
s_op = __get_sql_operator(event.value[0].operator)
**sh.multi_values(event.value[0].value, value_key=e_k1),
**sh.multi_values(event.value[1].value, value_key=e_k2)}
s_op = sh.get_sql_operator(event.value[0].operator)
event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"]
if event_index > 0 and not or_events:
event_where.append("main2.session_id=event_0.session_id")
is_any = _isAny_opreator(event.value[0].operator)
is_any = sh.isAny_opreator(event.value[0].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s",
sh.multi_conditions(
f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s",
event.value[0].value, value_key=e_k1))
s_op = __get_sql_operator(event.value[1].operator)
is_any = _isAny_opreator(event.value[1].operator)
s_op = sh.get_sql_operator(event.value[1].operator)
is_any = sh.isAny_opreator(event.value[1].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s",
sh.multi_conditions(
f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s",
event.value[1].value, value_key=e_k2))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(
_multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
apply = False
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._status_code:
event_where.append(
_multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._duration:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
else:
print(f"undefined FETCH filter: {f.type}")
if not apply:
continue
elif event_type == schemas.EventType.graphql:
event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
event_where.append(
_multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
else:
print(f"undefined GRAPHQL filter: {f.type}")
else:
@ -1005,7 +979,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
extra_constraints.append("ser.project_id = %(project_id)s")
# if error_status != schemas.ErrorStatus.all:
@ -1114,39 +1088,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
return results
def search_by_issue(user_id, issue, project_id, start_date, end_date):
constraints = ["s.project_id = %(projectId)s",
"p_issues.context_string = %(issueContextString)s",
"p_issues.type = %(issueType)s"]
if start_date is not None:
constraints.append("start_ts >= %(startDate)s")
if end_date is not None:
constraints.append("start_ts <= %(endDate)s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
FROM public.sessions AS s
INNER JOIN events_common.issues USING (session_id)
INNER JOIN public.issues AS p_issues USING (issue_id)
LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)
WHERE {" AND ".join(constraints)}
ORDER BY s.session_id DESC;""",
{
"issueContextString": issue["contextString"],
"issueType": issue["type"], "userId": user_id,
"projectId": project_id,
"startDate": start_date,
"endDate": end_date
}))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@ -1253,8 +1194,9 @@ def delete_sessions_by_user_ids(project_id, user_ids):
def count_all():
with pg_client.PostgresClient(unlimited_query=True) as cur:
row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
return row.get("count", 0)
cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
row = cur.fetchone()
return row.get("count", 0) if row else 0
def session_exists(project_id, session_id):
@ -1262,7 +1204,8 @@ def session_exists(project_id, session_id):
query = cur.mogrify("""SELECT 1
FROM public.sessions
WHERE session_id=%(session_id)s
AND project_id=%(project_id)s""",
AND project_id=%(project_id)s
LIMIT 1;""",
{"project_id": project_id, "session_id": session_id})
cur.execute(query)
row = cur.fetchone()


@ -13,9 +13,11 @@ def __get_devtools_keys(project_id, session_id):
]
def get_urls(session_id, project_id):
def get_urls(session_id, project_id, check_existence: bool = True):
results = []
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
if check_existence and not s3.exists(bucket=config("sessions_bucket"), key=k):
continue
results.append(s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("sessions_bucket"), 'Key': k},


@ -36,15 +36,15 @@ def favorite_session(context: schemas.CurrentContext, project_id, session_id):
return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
def favorite_session_exists(user_id, session_id):
def favorite_session_exists(session_id, user_id=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT session_id
f"""SELECT session_id
FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
AND session_id = %(session_id)s""",
session_id = %(session_id)s
{'AND user_id = %(userId)s' if user_id else ''};""",
{"userId": user_id, "session_id": session_id})
)
r = cur.fetchone()
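With user_id now optional, the f-string above renders one of two statements; both shapes are written out here for illustration:
# favorite_session_exists(session_id=42, user_id=7) executes:
#   SELECT session_id FROM public.user_favorite_sessions
#   WHERE session_id = %(session_id)s AND user_id = %(userId)s;
# favorite_session_exists(session_id=42) executes:
#   SELECT session_id FROM public.user_favorite_sessions
#   WHERE session_id = %(session_id)s;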


@ -18,9 +18,11 @@ def __get_mob_keys_deprecated(session_id):
return [str(session_id), str(session_id) + "e"]
def get_urls(project_id, session_id):
def get_urls(project_id, session_id, check_existence: bool = True):
results = []
for k in __get_mob_keys(project_id=project_id, session_id=session_id):
if check_existence and not s3.exists(bucket=config("sessions_bucket"), key=k):
continue
results.append(s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("sessions_bucket"), 'Key': k},
@ -29,9 +31,11 @@ def get_urls(project_id, session_id):
return results
def get_urls_depercated(session_id):
def get_urls_depercated(session_id, check_existence: bool = True):
results = []
for k in __get_mob_keys_deprecated(session_id=session_id):
if check_existence and not s3.exists(bucket=config("sessions_bucket"), key=k):
continue
results.append(s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("sessions_bucket"), 'Key': k},


@ -3,15 +3,16 @@ from urllib.parse import urljoin
from decouple import config
import schemas
from chalicelib.core import sessions
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
@ -31,8 +32,8 @@ def get_note(tenant_id, project_id, user_id, note_id, share=None):
def get_session_notes(tenant_id, project_id, session_id, user_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*
FROM sessions_notes
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
AND sessions_notes.deleted_at IS NULL
AND sessions_notes.session_id = %(session_id)s
@ -57,18 +58,18 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
if data.tags and len(data.tags) > 0:
k = "tag_value"
conditions.append(
sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sessions._multiple_values(data.tags, value_key=k)
sh.multi_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sh.multi_values(data.tags, value_key=k)
if data.shared_only:
conditions.append("sessions_notes.is_public")
elif data.mine_only:
conditions.append("sessions_notes.user_id = %(user_id)s")
else:
conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")
query = cur.mogrify(f"""SELECT sessions_notes.*
FROM sessions_notes
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
ORDER BY created_at {data.order}
ORDER BY created_at {data.order.value}
LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
@ -84,7 +85,7 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
RETURNING *;""",
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()})
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
@ -113,7 +114,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
AND user_id = %(user_id)s
AND note_id = %(note_id)s
AND deleted_at ISNULL
RETURNING *;""",
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()})
)
row = helper.dict_to_camel_case(cur.fetchone())
@ -155,7 +156,7 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
blocks.append({"type": "context",
"elements": [{"type": "plain_text",
"text": f"Tag: *{note['tag']}*"}]})
bottom = f"Created by {note['creatorName'].capitalize()}"
bottom = f"Created by {note['userName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "context",
@ -166,3 +167,60 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
webhook_id=webhook_id,
body={"blocks": blocks}
)
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"[Note for session {note['sessionId']}]({session_url})"
blocks = [{
"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"
},
{
"type": "TextBlock",
"spacing": "Small",
"text": note["message"]
}
]
if note["tag"]:
blocks.append({"type": "TextBlock",
"spacing": "Small",
"text": f"Tag: *{note['tag']}*",
"size": "Small"})
bottom = f"Created by {note['userName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "TextBlock",
"spacing": "Default",
"text": bottom,
"size": "Small",
"fontType": "Monospace"})
return MSTeams.send_raw(
tenant_id=tenant_id,
webhook_id=webhook_id,
body={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [{
"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{"width": "stretch",
"items": blocks,
"type": "Column"}]
}]}}
]})
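A minimal sketch of calling the new helper; the argument values are hypothetical and any route wiring is outside this diff:
# Hypothetical caller of share_to_msteams:
result = share_to_msteams(tenant_id=1, user_id=7, project_id=1,
                          note_id=42, webhook_id=3)
# -> {"errors": ["Note not found"]} when the note is missing or not visible,
#    otherwise the MSTeams.send_raw response for the Adaptive Card above.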


@ -1,6 +1,7 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"
from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.core import events, metadata, sessions
@ -49,33 +50,33 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
continue
f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
# filter_args = _multiple_values(f["value"])
op = sessions.__get_sql_operator(f["operator"])
op = sh.get_sql_operator(f["operator"])
filter_type = f["type"]
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
f_k = f"f_value{i}"
values = {**values,
**sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f["value"]) > 0 and f["value"][0] is not None:
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
@ -85,36 +86,36 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values["maxDuration"] = f["value"][1]
elif filter_type == schemas.FilterType.referrer:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
# op = sessions.__get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
first_stage_extra_constraints.append(
sessions._multiple_conditions(
sh.multi_conditions(
f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
@ -124,7 +125,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
if not isinstance(s["value"], list):
s["value"] = [s["value"]]
is_any = sessions._isAny_opreator(s["operator"])
is_any = sh.isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
i += 1
@ -132,41 +133,42 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
next_table = events.event_type.CLICK.table
next_col_name = events.event_type.CLICK.column
elif event_type == events.event_type.INPUT.ui_type:
next_table = events.event_type.INPUT.table
next_col_name = events.event_type.INPUT.column
elif event_type == events.event_type.LOCATION.ui_type:
next_table = events.event_type.LOCATION.table
next_col_name = events.event_type.LOCATION.column
elif event_type == events.event_type.CUSTOM.ui_type:
next_table = events.event_type.CUSTOM.table
next_col_name = events.event_type.CUSTOM.column
op = sh.get_sql_operator(s["operator"])
# event_type = s["type"].upper()
event_type = s["type"]
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
# IOS --------------
elif event_type == events.event_type.CLICK_IOS.ui_type:
next_table = events.event_type.CLICK_IOS.table
next_col_name = events.event_type.CLICK_IOS.column
elif event_type == events.event_type.INPUT_IOS.ui_type:
next_table = events.event_type.INPUT_IOS.table
next_col_name = events.event_type.INPUT_IOS.column
elif event_type == events.event_type.VIEW_IOS.ui_type:
next_table = events.event_type.VIEW_IOS.table
next_col_name = events.event_type.VIEW_IOS.column
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
next_table = events.event_type.CUSTOM_IOS.table
next_col_name = events.event_type.CUSTOM_IOS.column
elif event_type == events.EventType.CLICK_IOS.ui_type:
next_table = events.EventType.CLICK_IOS.table
next_col_name = events.EventType.CLICK_IOS.column
elif event_type == events.EventType.INPUT_IOS.ui_type:
next_table = events.EventType.INPUT_IOS.table
next_col_name = events.EventType.INPUT_IOS.column
elif event_type == events.EventType.VIEW_IOS.ui_type:
next_table = events.EventType.VIEW_IOS.table
next_col_name = events.EventType.VIEW_IOS.column
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
print("=================UNDEFINED")
print(f"=================UNDEFINED:{event_type}")
continue
values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
if sessions.__is_negation_operator(op) and i > 0:
op = sessions.__reverse_sql_operator(op)
values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
if sh.is_negation_operator(op) and i > 0:
op = sh.reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
FROM {next_table} AS s_main
@ -177,8 +179,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
if is_any:
main_condition = "TRUE"
else:
main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@ -319,7 +321,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues,
transitions ::: if transited from the first stage to the last - 1
else - 0
errors ::: a dictionary where the keys are all unique issues (currently context-wise)
errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
the values are lists
if an issue happened between the first stage to the last - 1
else - 0
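One plausible reading of that docstring, sketched with hypothetical issue keys (not part of the diff):

transitions = [1, 0, 1]        # per session: 1 if it transited from the first stage to the last, else 0
errors = {
    # one list per unique issue, same 1/0 encoding per session
    "click_rage": [1, 0, 0],   # hypothetical issue keys
    "dead_click": [0, 0, 1],
}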

View file

@ -1,21 +0,0 @@
from datetime import datetime
from decouple import config
from chalicelib.core.collaboration_slack import Slack
def send_batch(notifications_list):
if notifications_list is None or len(notifications_list) == 0:
return
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \
+ f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
"title": n["notification"]["title"],
"title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()})
for batch in webhookId_map.keys():
Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])

View file

@ -1,4 +1,3 @@
import hashlib
from urllib.parse import urlparse
import requests
@ -8,17 +7,11 @@ from chalicelib.core import sourcemaps_parser
from chalicelib.utils import s3
def __get_key(project_id, url):
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path
return f"{project_id}/{hashlib.md5(new_url.encode()).hexdigest()}"
def presign_share_urls(project_id, urls):
results = []
for u in urls:
results.append(s3.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120,
key=__get_key(project_id, u),
key=s3.generate_file_key_from_url(project_id, u),
check_exists=True))
return results
@ -28,7 +21,7 @@ def presign_upload_urls(project_id, urls):
for u in urls:
results.append(s3.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'),
expires_in=1800,
key=__get_key(project_id, u)))
key=s3.generate_file_key_from_url(project_id, u)))
return results
@ -94,7 +87,7 @@ def get_traces_group(project_id, payload):
file_exists_in_bucket = False
file_exists_in_server = False
file_url = u["absPath"]
key = __get_key(project_id, file_url) # use filename instead?
key = s3.generate_file_key_from_url(project_id, file_url) # use filename instead?
params_idx = file_url.find("?")
if file_url and len(file_url) > 0 \
and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"):
@ -185,7 +178,7 @@ def fetch_missed_contexts(frames):
line = lines[l]
offset = c - MAX_COLUMN_OFFSET
if offset < 0: # if the line is shirt
if offset < 0: # if the line is short
offset = 0
frames[i]["context"].append([frames[i]["lineNo"], line[offset: c + MAX_COLUMN_OFFSET + 1]])
return frames

View file

@ -6,60 +6,50 @@ from chalicelib.core import users, license
def get_by_tenant_id(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
tenant_id,
name,
api_key,
created_at,
'{license.EDITION}' AS edition,
openreplay_version() AS version_number,
opt_out
FROM public.tenants
LIMIT 1;""",
{"tenantId": tenant_id})
)
query = cur.mogrify(f"""SELECT tenants.tenant_id,
tenants.name,
tenants.api_key,
tenants.created_at,
'{license.EDITION}' AS edition,
openreplay_version() AS version_number,
tenants.opt_out
FROM public.tenants
LIMIT 1;""",
{"tenantId": tenant_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def get_by_api_key(api_key):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
1 AS tenant_id,
name,
created_at
FROM public.tenants
WHERE api_key = %(api_key)s
LIMIT 1;""",
{"api_key": api_key})
)
query = cur.mogrify(f"""SELECT 1 AS tenant_id,
tenants.name,
tenants.created_at
FROM public.tenants
WHERE tenants.api_key = %(api_key)s
LIMIT 1;""",
{"api_key": api_key})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def generate_new_api_key(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""UPDATE public.tenants
SET api_key=generate_api_key(20)
RETURNING api_key;""",
{"tenant_id": tenant_id})
)
query = cur.mogrify(f"""UPDATE public.tenants
SET api_key=generate_api_key(20)
RETURNING api_key;""",
{"tenant_id": tenant_id})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
def edit_client(tenant_id, changes):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
UPDATE public.tenants
SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])}
RETURNING name, opt_out;""",
{"tenantId": tenant_id, **changes})
)
query = cur.mogrify(f"""UPDATE public.tenants
SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])}
RETURNING name, opt_out;""",
{"tenant_id": tenant_id, **changes})
cur.execute(query=query)
return helper.dict_to_camel_case(cur.fetchone())
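The recurring change in this file: build the final SQL with cur.mogrify(), then execute the prepared string. A minimal standalone sketch of the pattern with psycopg2 (connection details are illustrative; OpenReplay goes through pg_client.PostgresClient):

import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=postgres")  # hypothetical DSN
with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
    # mogrify() returns the query with parameters safely interpolated,
    # so it can be logged or inspected before execution.
    query = cur.mogrify("""SELECT tenants.name
                           FROM public.tenants
                           WHERE tenants.tenant_id = %(tenant_id)s
                           LIMIT 1;""", {"tenant_id": 1})
    cur.execute(query)
    row = cur.fetchone()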

View file

@ -22,7 +22,7 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
query = cur.mogrify(f"""\
WITH u AS (INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
RETURNING user_id,email,role,name
RETURNING user_id,email,role,name,created_at
),
au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now()))
@ -33,6 +33,7 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
u.email,
u.role,
u.name,
u.created_at,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -41,10 +42,11 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
{"email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"invitation_token": invitation_token})
cur.execute(
query
)
return helper.dict_to_camel_case(cur.fetchone())
cur.execute(query)
row = helper.dict_to_camel_case(cur.fetchone())
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
def restore_member(user_id, email, invitation_token, admin, name, owner=False):
@ -63,12 +65,11 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
name,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
created_at;""",
{"user_id": user_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "name": name})
cur.execute(
query
)
cur.execute(query)
result = cur.fetchone()
query = cur.mogrify("""\
UPDATE public.basic_authentication
@ -79,10 +80,9 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
WHERE user_id=%(user_id)s
RETURNING invitation_token;""",
{"user_id": user_id, "invitation_token": invitation_token})
cur.execute(
query
)
cur.execute(query)
result["invitation_token"] = cur.fetchone()["invitation_token"]
result["created_at"] = TimeUTC.datetime_to_timestamp(result["created_at"])
return helper.dict_to_camel_case(result)
@ -181,9 +181,7 @@ def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
if user:
return {"errors": ["user already exists"]}
name = data.get("name", None)
if name is not None and len(name) == 0:
return {"errors": ["invalid user name"]}
if name is None:
if name is None or len(name) == 0:
name = data["email"]
invitation_token = __generate_invitation_token()
user = get_deleted_user_by_email(email=data["email"])
@ -483,24 +481,8 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
'jwt': r.pop('jwt')
}
@ -532,14 +514,6 @@ def set_password_invitation(user_id, new_password):
}
def count_members():
with pg_client.PostgresClient() as cur:
cur.execute("""SELECT COUNT(user_id)
FROM public.users WHERE deleted_at IS NULL;""")
r = cur.fetchone()
return r["count"]
def email_exists(email):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -602,12 +576,12 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
def change_jwt_iat(user_id):
@ -648,9 +622,9 @@ def authenticate(email, password, for_change_password=False):
return True
r = helper.dict_to_camel_case(r)
jwt_iat = change_jwt_iat(r['userId'])
iat = TimeUTC.datetime_to_timestamp(jwt_iat)
return {
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(jwt_iat),
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], iat=iat,
aud=f"front:{helper.get_stage_name()}"),
"email": email,
**r
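The last hunk only hoists the iat conversion into a local before generate_jwt(). A hedged sketch of the token issuance with pyjwt, assuming the helper converts TimeUTC's millisecond timestamps to RFC 7519 seconds (consistent with the // 1000 comparisons in auth_exists above); claims and secret are illustrative, not OpenReplay's exact payload:

import jwt  # pyjwt, pinned in requirements.txt

def generate_jwt_sketch(user_id: int, tenant_id: int, iat_ms: int, aud: str) -> str:
    iat = iat_ms // 1000  # RFC 7519 expects seconds since the epoch
    payload = {"userId": user_id, "tenantId": tenant_id,
               "iat": iat, "exp": iat + 2592000,  # JWT_EXPIRATION default from the env hunk below
               "iss": "openreplay-oss", "aud": aud}
    return jwt.encode(payload, "a-random-secret", algorithm="HS256")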

View file

@ -1,7 +1,11 @@
import logging
from typing import Optional
import requests
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -12,7 +16,7 @@ def get_by_id(webhook_id):
cur.mogrify("""\
SELECT w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
)
w = helper.dict_to_camel_case(cur.fetchone())
@ -21,15 +25,14 @@ def get_by_id(webhook_id):
return w
def get(tenant_id, webhook_id):
def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
cur.mogrify("""SELECT w.*
FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s
AND deleted_at ISNULL AND type=%(webhook_type)s;""",
{"webhook_id": webhook_id, "webhook_type": webhook_type})
)
w = helper.dict_to_camel_case(cur.fetchone())
if w:
@ -40,11 +43,9 @@ def get(tenant_id, webhook_id):
def get_by_type(tenant_id, webhook_type):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
{"type": webhook_type})
)
webhooks = helper.list_to_camel_case(cur.fetchall())
@ -55,22 +56,12 @@ def get_by_type(tenant_id, webhook_type):
def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur:
cur.execute("""\
SELECT
webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;"""
)
cur.execute("""SELECT w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;""")
all = helper.list_to_camel_case(cur.fetchall())
if replace_none:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for k in w.keys():
if w[k] is None:
w[k] = ''
else:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return all
@ -83,7 +74,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE webhook_id =%(id)s AND deleted_at ISNULL
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
@ -100,7 +91,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
query = cur.mogrify("""\
INSERT INTO public.webhooks(endpoint,auth_header,type,name)
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(
@ -115,7 +106,25 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
tenant_id: Optional[int] = None) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.webhooks
WHERE name ILIKE %(name)s
AND deleted_at ISNULL
AND type=%(webhook_type)s
{"AND webhook_id!=%(exclude_id)s" if exclude_id else ""}) AS exists;""",
{"name": name, "exclude_id": exclude_id, "webhook_type": webhook_type})
cur.execute(query)
row = cur.fetchone()
return row["exists"]
def add_edit(tenant_id, data, replace_none=None):
if "name" in data and len(data["name"]) > 0 \
and exists_by_name(name=data["name"], exclude_id=data.get("webhookId")):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if data.get("webhookId") is not None:
return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
changes={"endpoint": data["endpoint"],

View file

@ -10,6 +10,10 @@ from decouple import config
from chalicelib.utils import smtp
loglevel = config("LOGLEVEL", default=logging.INFO)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)
def __get_subject(subject):
return subject

View file

@ -0,0 +1,14 @@
from chalicelib.core import sourcemaps
def format_first_stack_frame(error):
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
for s in error["stack"]:
for c in s.get("context", []):
for sci, sc in enumerate(c):
if isinstance(sc, str) and len(sc) > 1000:
c[sci] = sc[:1000]
# convert bytes to string:
if isinstance(s["filename"], bytes):
s["filename"] = s["filename"].decode("utf-8")
return error
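A before/after illustration of what the new helper does to a single frame (hypothetical data; format_payload() itself lives in sourcemaps):

frame = {"filename": b"webpack:///src/app.js",     # bytes in, str out
         "context": [[12, "x" * 5000]]}            # over-long context line
# after format_first_stack_frame() post-processing:
#   frame["filename"] == "webpack:///src/app.js"
#   len(frame["context"][0][1]) == 1000            # truncated to 1000 chars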

View file

@ -3,6 +3,7 @@ import random
import re
import string
from typing import Union
from urllib.parse import urlparse
from decouple import config
@ -98,7 +99,7 @@ TRACK_TIME = True
def allow_captcha():
return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \
and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
def string_to_sql_like(value):
@ -282,6 +283,7 @@ def custom_alert_to_front(values):
# to support frontend format for payload
if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom:
values["query"]["left"] = values["seriesId"]
values["seriesId"] = None
return values
@ -304,3 +306,16 @@ def __time_value(row):
def is_saml2_available():
return config("hastSAML2", default=False, cast=bool)
def get_domain():
_url = config("SITE_URL")
if not _url.startswith("http"):
_url = "http://" + _url
return '.'.join(urlparse(_url).netloc.split(".")[-2:])
def obfuscate(text, keep_last: int = 4):
if text is None or not isinstance(text, str):
return text
return "*" * (len(text) - keep_last) + text[-keep_last:]

View file

@ -1,60 +1,72 @@
<!DOCTYPE html>
<html>
<body style="margin: 0; padding: 0; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
<table width="100%" border="0" style="background-color: #f6f6f6">
<tr>
<td>
<div style="border-radius: 3px; border-radius:4px; overflow: hidden; background-color: #ffffff; max-width: 600px; margin:20px auto;">
<table style="margin:20px auto; border:1px solid transparent; border-collapse:collapse; background-color: #ffffff; max-width:600px"
width="100%">
<!--Main Image-->
<tr>
<td style="padding:10px 30px;">
<center>
<img src="img/logo.png" alt="OpenReplay" width="100%" style="max-width: 120px;">
</center>
</td>
</tr>
<!--Main Title and Overview -->
<body
style="margin: 0; padding: 0; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
<table width="100%" border="0" style="background-color: #f6f6f6">
<tr>
<td>
<div
style="border-radius: 3px; border-radius:4px; overflow: hidden; background-color: #ffffff; max-width: 600px; margin:20px auto;">
<table
style="margin:20px auto; border:1px solid transparent; border-collapse:collapse; background-color: #ffffff; max-width:600px"
width="100%">
<!--Main Image-->
<tr>
<td style="padding:10px 30px;">
<center>
<img src="img/logo.png" alt="OpenReplay" width="100%" style="max-width: 120px;">
</center>
</td>
</tr>
<tr>
<td style="padding:0 15px;">
<h1 style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #286f6a">
New alert!</h1>
<p style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
%(message)s</p>
<p style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
<a href="%(frontend_url)s/%(project_id)s/metrics">See metrics</a> for more details.</p>
<!--Main Title and Overview -->
</td>
</tr>
<!--Footer-->
<tr>
<td style="padding: 0 15px">
<div style="border-top:1px dotted rgba(0,0,0,0.2); display: block; margin-top: 20px"></div>
<center>
<p style="font-size: 12px; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
Sent with &#9825; from OpenReplay &copy; 2022 - All rights reserved.<br><br>
<a href="https://openreplay.com" target="_blank"
style="text-decoration: none; color: #6c757d">https://openreplay.com/</a>
<tr>
<td style="padding:0 15px;">
<h1
style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #286f6a">
New alert!</h1>
<p
style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
%(message)s</p>
<p
style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
<a href="%(frontend_url)s/%(project_id)s/metrics">See metrics</a> for more details.
</p>
</center>
</td>
</tr>
</table>
</td>
</tr>
</div>
</td>
</tr>
</table>
<!--Footer-->
<tr>
<td style="padding: 0 15px">
<div style="border-top:1px dotted rgba(0,0,0,0.2); display: block; margin-top: 20px">
</div>
<center>
<p
style="font-size: 12px; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
Sent with &#9825; from OpenReplay &copy;
<script>document.write(`${new Date().getFullYear()}`)</script> - All rights
reserved.<br><br>
<a href="https://openreplay.com" target="_blank"
style="text-decoration: none; color: #6c757d">https://openreplay.com/</a>
</p>
</center>
</td>
</tr>
</table>
</div>
</td>
</tr>
</table>
</body>
</html>
</html>

View file

@ -6,7 +6,7 @@ from jira import JIRA
from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
from starlette import status
from starlette.exceptions import HTTPException
from fastapi import HTTPException
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"

View file

@ -1,9 +1,13 @@
from botocore.exceptions import ClientError
from decouple import config
import hashlib
from datetime import datetime, timedelta
from urllib.parse import urlparse
import boto3
import botocore
from botocore.client import Config
from botocore.exceptions import ClientError
from decouple import config
from requests.models import PreparedRequest
if not config("S3_HOST", default=False):
client = boto3.client('s3')
@ -51,7 +55,7 @@ def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
)
def get_presigned_url_for_upload(bucket, expires_in, key):
def get_presigned_url_for_upload(bucket, expires_in, key, **args):
return client.generate_presigned_url(
'put_object',
Params={
@ -62,6 +66,25 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
)
def get_presigned_url_for_upload_secure(bucket, expires_in, key, conditions=None, public=False, content_type=None):
acl = 'private'
if public:
acl = 'public-read'
fields = {"acl": acl}
if content_type:
fields["Content-Type"] = content_type
url_parts = client.generate_presigned_post(
Bucket=bucket,
Key=key,
ExpiresIn=expires_in,
Fields=fields,
Conditions=conditions,
)
req = PreparedRequest()
req.prepare_url(f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields'])
return req.url
def get_file(source_bucket, source_key):
try:
result = client.get_object(
@ -88,3 +111,13 @@ def schedule_for_deletion(bucket, key):
s3_object.copy_from(CopySource={'Bucket': bucket, 'Key': key},
Expires=datetime.now() + timedelta(days=7),
MetadataDirective='REPLACE')
def generate_file_key(project_id, key):
return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}"
def generate_file_key_from_url(project_id, url):
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path
return generate_file_key(project_id=project_id, key=new_url)
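These key helpers replace the module-private __get_key() removed from sourcemaps.py earlier in this diff. Worked through on a hypothetical URL:

from urllib.parse import urlparse
import hashlib

url = "https://cdn.example.com/static/app.js?v=42"   # hypothetical asset URL
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path       # query string dropped
key = f"13/{hashlib.md5(new_url.encode()).hexdigest()}"   # 13 = hypothetical project_id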

View file

@ -3,7 +3,7 @@ import smtplib
from smtplib import SMTPAuthenticationError
from decouple import config
from starlette.exceptions import HTTPException
from fastapi import HTTPException
class EmptySMTP:
@ -17,20 +17,20 @@ class SMTPClient:
def __init__(self):
if config("EMAIL_HOST") is None or len(config("EMAIL_HOST")) == 0:
return
elif config("EMAIL_USE_SSL").lower() == "false":
self.server = smtplib.SMTP(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")))
elif not config("EMAIL_USE_SSL", cast=bool):
self.server = smtplib.SMTP(host=config("EMAIL_HOST"), port=config("EMAIL_PORT", cast=int))
else:
if len(config("EMAIL_SSL_KEY")) == 0 or len(config("EMAIL_SSL_CERT")) == 0:
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")))
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=config("EMAIL_PORT", cast=int))
else:
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")),
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=config("EMAIL_PORT", cast=int),
keyfile=config("EMAIL_SSL_KEY"), certfile=config("EMAIL_SSL_CERT"))
def __enter__(self):
if self.server is None:
return EmptySMTP()
self.server.ehlo()
if config("EMAIL_USE_SSL").lower() == "false" and config("EMAIL_USE_TLS").lower() == "true":
if not config("EMAIL_USE_SSL", cast=bool) and config("EMAIL_USE_TLS", cast=bool):
self.server.starttls()
# stmplib docs recommend calling ehlo() before & after starttls()
self.server.ehlo()
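The SSL/TLS switches move from literal string comparison to decouple's cast=bool. The practical difference, with hypothetical env values:

from decouple import config

# old: config("EMAIL_USE_SSL").lower() == "false"
#      only the literal "false" disables SSL; "0", "no" or "off" silently enable it
# new: config("EMAIL_USE_SSL", cast=bool)
#      accepts "true"/"false", "1"/"0", "yes"/"no", "on"/"off",
#      and raises on anything unparseable instead of guessing
use_ssl = config("EMAIL_USE_SSL", cast=bool)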

View file

@ -0,0 +1,53 @@
from typing import Union
import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def multi_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def multi_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
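This new module is the sh helper imported throughout the hunks above; it centralizes the operator/condition utilities that sessions.py previously kept private. How the pieces compose, on a hypothetical two-value filter:

op = get_sql_operator(schemas.SearchEventOperator._contains)        # -> "ILIKE"
cond = multi_conditions(f"s.user_id {op} %(value)s", ["alice", "bob"])
# -> "(s.user_id ILIKE %(value_0)s OR s.user_id ILIKE %(value_1)s)"
params = multi_values(["alice", "bob"])
# -> {"value_0": "alice", "value_1": "bob"}
# In the diff itself, values first pass through helper.values_for_operator()
# so contains/starts_with patterns get their % wildcards.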

View file

@ -1,6 +0,0 @@
#!/bin/bash
cd utilities
nohup npm start &> /tmp/utilities.log &
cd ..
python env_handler.py
chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}

View file

@ -22,6 +22,7 @@ JWT_EXPIRATION=2592000
JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
ASSIST_KEY=
assist=/sockets-live
assistList=/sockets-list
pg_dbname=postgres
@ -50,4 +51,5 @@ DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=144000
ASSIST_JWT_SECRET=
PYTHONUNBUFFERED=1
PYTHONUNBUFFERED=1
THUMBNAILS_BUCKET=thumbnails

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.87.0
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1.post1
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.10.0

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.87.0
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1.post1
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.10.0

File diff suppressed because it is too large

View file

@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse
import schemas
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
sessions_favorite, assist, sessions_notes
sessions_favorite, assist, sessions_notes, click_maps
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
@ -20,7 +20,7 @@ public_app, app, app_apikey = get_routers()
@public_app.get('/signup', tags=['signup'])
def get_all_signup():
async def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": None,
"ssoProvider": None,
@ -28,7 +28,7 @@ def get_all_signup():
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
async def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
@ -46,33 +46,17 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def edit_account(data: schemas.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
editor_id=context.user_id)
@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context.tenant_id),
"remaining": -1
}}
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
async def add_slack_integration(data: schemas.AddCollaborationSchema,
context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
@ -81,10 +65,12 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)
old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId)
if not old:
return {"errors": ["Slack integration not found."]}
if old["endpoint"] != data.url:
if not Slack.say_hello(data.url):
return {
@ -96,14 +82,14 @@ def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = B
@app.post('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
background_tasks=background_tasks)
@public_app.get('/users/invitation', tags=['users'])
def process_invitation_link(token: str):
async def process_invitation_link(token: str):
if token is None or len(token) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(token)
@ -120,7 +106,7 @@ def process_invitation_link(token: str):
@public_app.post('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase)
@ -133,15 +119,15 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.put('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
async def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)
@app.get('/metadata/session_search', tags=["metadata"])
def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)):
async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)):
if key is None or value is None or len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
if len(value) == 0:
@ -153,20 +139,15 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True)}
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
return {"errors": ["session not found"]}
data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
@ -182,8 +163,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
async def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
return data
@ -193,20 +174,20 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": errors.search(data, projectId, user_id=context.user_id)}
@app.get('/{projectId}/errors/stats', tags=['errors'])
def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
context: schemas.CurrentContext = Depends(OR_context)):
async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
context: schemas.CurrentContext = Depends(OR_context)):
return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp)
@app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
if data.get("data") is not None:
@ -216,17 +197,17 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'])
def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), density: int = 7,
context: schemas.CurrentContext = Depends(OR_context)):
async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), density: int = 7,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"startDate": startDate, "endDate": endDate, "density": density})
return data
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'])
def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
async def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
return data
@ -236,8 +217,9 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)):
async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
elif action == "sessions":
@ -253,8 +235,8 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
if data is None:
data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId,
@ -268,8 +250,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]}
if isinstance(sessionId, str):
print(f"{sessionId} not a valid number.")
@ -288,8 +270,8 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]}
if isinstance(sessionId, str):
print(f"{sessionId} not a valid number.")
@ -308,20 +290,20 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())}
async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
async def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)}
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
async def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context.tenant_id,
user_id=context.user_id)
@ -333,8 +315,8 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"])
def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
async def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
tenant_id=context.tenant_id, user_id=context.user_id)
if "errors" in data:
@ -345,8 +327,9 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def comment_assignment(projectId: int, sessionId: int, issueId: str,
data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, assignment_id=issueId,
user_id=context.user_id, message=data.message)
@ -358,8 +341,8 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]}
data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
@ -372,7 +355,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
if "errors" in data:
@ -383,8 +366,8 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
if "errors" in data.keys():
@ -395,24 +378,37 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId)
return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
async def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
if "errors" in data:
return data
return {'data': data}
@app.post('/{projectId}/click_maps/search', tags=["click maps"])
async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}

View file

@ -1,3 +1,6 @@
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from chalicelib.core import telemetry
from chalicelib.core import weekly_report, jobs
@ -15,7 +18,10 @@ async def telemetry_cron() -> None:
cron_jobs = [
{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
{"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5, "misfire_grace_time": 60 * 60}
{"func": telemetry_cron, "trigger": CronTrigger(day_of_week="*"),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": run_scheduled_jobs, "trigger": IntervalTrigger(minutes=1),
"misfire_grace_time": 20, "max_instances": 1},
{"func": weekly_report2, "trigger": CronTrigger(day_of_week="mon", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1}
]
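The job list now carries explicit trigger objects plus misfire_grace_time and max_instances limits. A hedged sketch of registering such a list (the scheduler setup is assumed, not shown in this hunk):

from apscheduler.schedulers.asyncio import AsyncIOScheduler

scheduler = AsyncIOScheduler()
for job in cron_jobs:
    # misfire_grace_time tolerates late wake-ups up to N seconds;
    # max_instances=1 keeps a slow run from overlapping the next one.
    scheduler.add_job(**job)
scheduler.start()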

View file

@ -1,398 +0,0 @@
from fastapi import Body
import schemas
from chalicelib.core import metrics
from chalicelib.core import metadata
from chalicelib.utils import helper
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"])
def get_metadata_map(projectId: int):
metamap = []
for m in metadata.get(project_id=projectId):
metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
return {"data": metamap}
@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_application_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_page_metrics(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_user_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_performance(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_images(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_missing_resources_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_network(project_id=projectId, **data.dict())}
@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"])
def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None,
key: str = ""):
if q is None or len(q) == 0:
return {"data": []}
q = '^' + q
if widget in ['performance']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=True)
elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
'impacted_sessions_by_slow_pages', 'pages_response_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, pages_only=True)
elif widget in ['resources_loading_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False)
elif widget in ['time_between_events', 'events']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False, events_only=True)
elif widget in ['metadata']:
data = metrics.search(q, None, project_id=projectId,
platform=platform, metadata=True, key=key)
else:
return {"errors": [f"unsupported widget: {widget}"]}
return {'data': data}
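For illustration, a minimal client sketch for the autocomplete route above. The base URL, project id, and auth header are assumptions; the query parameters mirror the function signature:
import requests  # any HTTP client works

BASE = "http://localhost:8000"  # hypothetical host
resp = requests.get(
    f"{BASE}/1/dashboard/pages/search",  # projectId=1, widget='pages'
    params={"q": "checkout", "type": "location", "platform": "desktop"},
    headers={"Authorization": "Bearer <token>"},  # auth scheme is an assumption
)
print(resp.json())  # {"data": [...]} on success, {"errors": [...]} otherwise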
# 1
@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_resources(project_id=projectId, **data.dict())}
# 2
@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_loading_time(project_id=projectId, **data.dict())}
# 3
@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())}
# 4
@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_busiest_time_of_day(project_id=projectId, **data.dict())}
# 5
@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_location(project_id=projectId, **data.dict())}
# 6
@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_speed_index_location(project_id=projectId, **data.dict())}
# 7
@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())}
# 8
@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time_distribution(project_id=projectId, **data.dict())}
# 9
@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_top_metrics(project_id=projectId, **data.dict())}
# 10
@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_time_to_render(project_id=projectId, **data.dict())}
# 11
@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())}
# 12
@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())}
# 12.1
@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
# 12.2
@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())}
# 13
@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
def get_dashboard_crashes(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_crashes(project_id=projectId, **data.dict())}
# 14
@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors(project_id=projectId, **data.dict())}
# 14.1
@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_4xx(project_id=projectId, **data.dict())}
# 14.2
@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_5xx(project_id=projectId, **data.dict())}
# 15
@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_domains(project_id=projectId, **data.dict())}
# 16
@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_domains(project_id=projectId, **data.dict())}
# 17
@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_per_browser(project_id=projectId, **data.dict())}
# 18
@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors(project_id=projectId, **data.dict())}
# 18.1
@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_4xx(project_id=projectId, **data.dict())}
# 18.2
@app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_5xx(project_id=projectId, **data.dict())}
# 19
@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_type(project_id=projectId, **data.dict())}
# 20
@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_by_party(project_id=projectId, **data.dict())}
# 21
@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
def get_dashboard_resource_type_vs_response_end(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.resource_type_vs_response_end(project_id=projectId, **data.dict())}
# 22
@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
# 23
@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
# 24
@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_count_by_type(project_id=projectId, **data.dict())}
# # 25
# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
# return {"errors": ["please choose 2 events"]}
@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": "count_sessions",
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
*helper.explode_widget(data={**metrics.get_application_activity(project_id=projectId, **data.dict()),
"chart": metrics.get_performance(project_id=projectId, **data.dict())
.get("chart", [])}),
*helper.explode_widget(data=metrics.get_page_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=metrics.get_user_activity(project_id=projectId, **data.dict())),
{"key": "avg_pages_dom_buildtime",
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": "avg_pages_response_time",
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())
},
*helper.explode_widget(metrics.get_top_metrics(project_id=projectId, **data.dict())),
{"key": "avg_time_to_render", "data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": "avg_used_js_heap_size", "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": "avg_cpu", "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}
@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
def get_dashboard_group2(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": schemas.TemplatePredefinedKeys.count_sessions,
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_image_load_time,
"data": metrics.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_page_load_time,
"data": metrics.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_request_load_time,
"data": metrics.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start,
"data": metrics.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel,
"data": metrics.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_visited_pages,
"data": metrics.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_session_duration,
"data": metrics.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime,
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_response_time,
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_response_time,
"data": metrics.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_paint,
"data": metrics.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded,
"data": metrics.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_till_first_bit,
"data": metrics.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive,
"data": metrics.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.count_requests,
"data": metrics.get_top_metrics_count_requests(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_render,
"data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size,
"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_cpu,
"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}


@@ -1,4 +1,6 @@
from fastapi import Body, Depends
from typing import Union
from fastapi import Body, Depends, Request
import schemas
from chalicelib.core import dashboards, custom_metrics, funnels
@@ -46,11 +48,12 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
@app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
def add_widget_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def add_card_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
data=data)}
@@ -58,7 +61,7 @@ def add_widget_to_dashboard(projectId: int, dashboardId: int,
@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
data: schemas.CreateCustomMetricsSchema = Body(...),
data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
dashboard_id=dashboardId, data=data)}
@@ -80,43 +83,41 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
widget_id=widgetId)
@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId, data=data)
if data is None:
return {"errors": ["widget not found"]}
return {"data": data}
@app.get('/{projectId}/metrics/templates', tags=["dashboard"])
def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.get_templates(project_id=projectId, user_id=context.user_id)}
# @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
# def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
# data: schemas.CardChartSchema = Body(...),
# context: schemas.CurrentContext = Depends(OR_context)):
# data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
# widget_id=widgetId, data=data)
# if data is None:
# return {"errors": ["widget not found"]}
# return {"data": data}
@app.post('/{projectId}/cards/try', tags=["cards"])
@app.post('/{projectId}/metrics/try', tags=["dashboard"])
@app.put('/{projectId}/metrics/try', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card(projectId: int, data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/cards/try/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data}
@app.post('/{projectId}/cards/try/issues', tags=["cards"])
@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.series) == 0:
return {"data": []}
data.series[0].filter.startDate = data.startTimestamp
@@ -125,46 +126,72 @@ def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSe
return {"data": data}
@app.get('/{projectId}/cards', tags=["cards"])
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards', tags=["cards"])
@app.post('/{projectId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/metrics', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def create_card(projectId: int, data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards/search', tags=["cards"])
@app.post('/{projectId}/metrics/search', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def get_custom_metric(projectId: int, metric_id: str, context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": ["invalid card_id"]}
data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
if data is None:
return {"errors": ["custom metric not found"]}
return {"errors": ["card not found"]}
return {"data": data}
# @app.get('/{projectId}/cards/{metric_id}/thumbnail', tags=["cards"])
# def sign_thumbnail_for_upload(projectId: int, metric_id: Union[int, str],
# context: schemas.CurrentContext = Depends(OR_context)):
# if not isinstance(metric_id, int):
# return {"errors": ["invalid card_id"]}
# return custom_metrics.add_thumbnail(metric_id=metric_id, user_id=context.user_id, project_id=projectId)
@app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
def get_custom_metric_sessions(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_sessions(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": [f"invalid card_id: {metric_id}"]}
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
@@ -172,10 +199,11 @@ def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, issue_id=issueId, data=data)
@@ -184,10 +212,11 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
def get_custom_metric_errors_list(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
@@ -196,22 +225,22 @@ def get_custom_metric_errors_list(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
@@ -219,6 +248,7 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCus
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
@@ -231,6 +261,7 @@ def update_custom_metric_state(projectId: int, metric_id: int,
status=data.active)}
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):


@@ -30,7 +30,7 @@ def get_session_events(projectKey: str, sessionId: int):
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {
'data': events.get_by_sessionId2_pg(
'data': events.get_by_session_id(
project_id=projectId,
session_id=sessionId
)

api/run-alerts-dev.sh Executable file

@@ -0,0 +1,3 @@
#!/bin/zsh
uvicorn app_alerts:app --reload
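The same dev server can also be started from Python; a sketch assuming uvicorn is installed and app_alerts.py exposes app:
import uvicorn

if __name__ == "__main__":
    # equivalent to: uvicorn app_alerts:app --reload
    uvicorn.run("app_alerts:app", reload=True)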


@@ -15,6 +15,10 @@ def transform_email(email: str) -> str:
return email.lower().strip() if isinstance(email, str) else email
def remove_whitespace(value: str) -> str:
return " ".join(value.split()) if isinstance(value, str) else value
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@@ -64,7 +68,8 @@ class UpdateTenantSchema(BaseModel):
class CreateProjectSchema(BaseModel):
name: str = Field("my first project")
name: str = Field(default="my first project")
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class CurrentAPIContext(BaseModel):
@@ -78,14 +83,15 @@ class CurrentContext(CurrentAPIContext):
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
class AddCollaborationSchema(BaseModel):
name: str = Field(...)
url: HttpUrl = Field(...)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
_transform_url = validator('url', pre=True, allow_reuse=True)(remove_whitespace)
class EditSlackSchema(BaseModel):
class EditCollaborationSchema(AddCollaborationSchema):
name: Optional[str] = Field(None)
url: HttpUrl = Field(...)
class CreateNotificationSchema(BaseModel):
@@ -93,7 +99,19 @@ class CreateNotificationSchema(BaseModel):
notifications: List = Field(...)
class NotificationsViewSchema(BaseModel):
class _TimedSchema(BaseModel):
startTimestamp: int = Field(default=None)
endTimestamp: int = Field(default=None)
@root_validator
def time_validator(cls, values):
if values.get("startTimestamp") is not None and values.get("endTimestamp") is not None:
assert values.get("startTimestamp") < values.get("endTimestamp"), \
"endTimestamp must be greater than startTimestamp"
return values
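A self-contained sketch of the _TimedSchema check above, in the same pydantic v1 style used throughout this file (Timed is a stand-in name):
from typing import Optional
from pydantic import BaseModel, Field, ValidationError, root_validator

class Timed(BaseModel):
    startTimestamp: Optional[int] = Field(default=None)
    endTimestamp: Optional[int] = Field(default=None)

    @root_validator
    def time_validator(cls, values):
        if values.get("startTimestamp") is not None and values.get("endTimestamp") is not None:
            assert values["startTimestamp"] < values["endTimestamp"], \
                "endTimestamp must be greater than startTimestamp"
        return values

Timed(startTimestamp=1, endTimestamp=2)      # passes
try:
    Timed(startTimestamp=2, endTimestamp=1)  # the assert is surfaced as a ValidationError
except ValidationError as e:
    print(e)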
class NotificationsViewSchema(_TimedSchema):
ids: Optional[List] = Field(default=[])
startTimestamp: Optional[int] = Field(default=None)
endTimestamp: Optional[int] = Field(default=None)
@@ -117,6 +135,7 @@ class CreateEditWebhookSchema(BaseModel):
endpoint: str = Field(...)
authHeader: Optional[str] = Field(None)
name: Optional[str] = Field(...)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class CreateMemberSchema(BaseModel):
@@ -126,12 +145,15 @@ class CreateMemberSchema(BaseModel):
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
class EditMemberSchema(EditUserSchema):
name: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
@@ -145,6 +167,7 @@ class AssignmentSchema(BaseModel):
description: str = Field(...)
title: str = Field(...)
issue_type: str = Field(...)
_transform_title = validator('title', pre=True, allow_reuse=True)(remove_whitespace)
class Config:
alias_generator = attribute_to_camel_case
@@ -177,12 +200,6 @@ class WeeklyReportConfigSchema(BaseModel):
alias_generator = attribute_to_camel_case
class GetHeatmapPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-30))
endDate: int = Field(TimeUTC.now())
url: str = Field(...)
class DatadogSchema(BaseModel):
apiKey: str = Field(...)
applicationKey: str = Field(...)
@@ -241,6 +258,7 @@ class SumologicSchema(BaseModel):
class MetadataBasicSchema(BaseModel):
index: Optional[int] = Field(None)
key: str = Field(...)
_transform_key = validator('key', pre=True, allow_reuse=True)(remove_whitespace)
class MetadataListSchema(BaseModel):
@@ -345,7 +363,8 @@ class AlertSchema(BaseModel):
@root_validator(pre=True)
def transform_alert(cls, values):
if values.get("seriesId") is None and isinstance(values["query"]["left"], int):
values["seriesId"] = None
if isinstance(values["query"]["left"], int):
values["seriesId"] = values["query"]["left"]
values["query"]["left"] = AlertColumn.custom
@@ -378,59 +397,59 @@ class ErrorSource(str, Enum):
class EventType(str, Enum):
click = "CLICK"
input = "INPUT"
location = "LOCATION"
custom = "CUSTOM"
request = "REQUEST"
request_details = "FETCH"
graphql = "GRAPHQL"
state_action = "STATEACTION"
error = "ERROR"
click_ios = "CLICK_IOS"
input_ios = "INPUT_IOS"
view_ios = "VIEW_IOS"
custom_ios = "CUSTOM_IOS"
request_ios = "REQUEST_IOS"
error_ios = "ERROR_IOS"
click = "click"
input = "input"
location = "location"
custom = "custom"
request = "request"
request_details = "fetch"
graphql = "graphql"
state_action = "stateAction"
error = "error"
click_ios = "clickIos"
input_ios = "inputIos"
view_ios = "viewIos"
custom_ios = "customIos"
request_ios = "requestIos"
error_ios = "errorIos"
class PerformanceEventType(str, Enum):
location_dom_complete = "DOM_COMPLETE"
location_largest_contentful_paint_time = "LARGEST_CONTENTFUL_PAINT_TIME"
time_between_events = "TIME_BETWEEN_EVENTS"
location_ttfb = "TTFB"
location_avg_cpu_load = "AVG_CPU_LOAD"
location_avg_memory_usage = "AVG_MEMORY_USAGE"
fetch_failed = "FETCH_FAILED"
location_dom_complete = "domComplete"
location_largest_contentful_paint_time = "largestContentfulPaintTime"
time_between_events = "timeBetweenEvents"
location_ttfb = "ttfb"
location_avg_cpu_load = "avgCpuLoad"
location_avg_memory_usage = "avgMemoryUsage"
fetch_failed = "fetchFailed"
# fetch_duration = "FETCH_DURATION"
class FilterType(str, Enum):
user_os = "USEROS"
user_browser = "USERBROWSER"
user_device = "USERDEVICE"
user_country = "USERCOUNTRY"
user_id = "USERID"
user_anonymous_id = "USERANONYMOUSID"
referrer = "REFERRER"
rev_id = "REVID"
user_os = "userOs"
user_browser = "userBrowser"
user_device = "userDevice"
user_country = "userCountry"
user_id = "userId"
user_anonymous_id = "userAnonymousId"
referrer = "referrer"
rev_id = "revId"
# IOS
user_os_ios = "USEROS_IOS"
user_device_ios = "USERDEVICE_IOS"
user_country_ios = "USERCOUNTRY_IOS"
user_id_ios = "USERID_IOS"
user_anonymous_id_ios = "USERANONYMOUSID_IOS"
rev_id_ios = "REVID_IOS"
user_os_ios = "userOsIos"
user_device_ios = "userDeviceIos"
user_country_ios = "userCountryIos"
user_id_ios = "userIdIos"
user_anonymous_id_ios = "userAnonymousIdIos"
rev_id_ios = "revIdIos"
#
duration = "DURATION"
platform = "PLATFORM"
metadata = "METADATA"
issue = "ISSUE"
events_count = "EVENTS_COUNT"
utm_source = "UTM_SOURCE"
utm_medium = "UTM_MEDIUM"
utm_campaign = "UTM_CAMPAIGN"
duration = "duration"
platform = "platform"
metadata = "metadata"
issue = "issue"
events_count = "eventsCount"
utm_source = "utmSource"
utm_medium = "utmMedium"
utm_campaign = "utmCampaign"
class SearchEventOperator(str, Enum):
@@ -447,6 +466,15 @@ class SearchEventOperator(str, Enum):
_ends_with = "endsWith"
class ClickEventExtraOperator(str, Enum):
_on_selector = "onSelector"
_on_text = "onText"
class IssueFilterOperator(str, Enum):
_on_selector = ClickEventExtraOperator._on_selector.value
class PlatformType(str, Enum):
mobile = "mobile"
desktop = "desktop"
@@ -507,19 +535,23 @@ class HttpMethod(str, Enum):
class FetchFilterType(str, Enum):
_url = "FETCH_URL"
_status_code = "FETCH_STATUS_CODE"
_method = "FETCH_METHOD"
_duration = "FETCH_DURATION"
_request_body = "FETCH_REQUEST_BODY"
_response_body = "FETCH_RESPONSE_BODY"
_url = "fetchUrl" # FETCH_URL
_status_code = "fetchStatusCode" # FETCH_STATUS_CODE
_method = "fetchMethod" # FETCH_METHOD
_duration = "fetchDuration" # FETCH_DURATION
_request_body = "fetchRequestBody" # FETCH_REQUEST_BODY
_response_body = "fetchResponseBody" # FETCH_RESPONSE_BODY
class GraphqlFilterType(str, Enum):
_name = "GRAPHQL_NAME"
_method = "GRAPHQL_METHOD"
_request_body = "GRAPHQL_REQUEST_BODY"
_response_body = "GRAPHQL_RESPONSE_BODY"
_name = "graphqlName" # GRAPHQL_NAME
_method = "graphqlMethod" # GRAPHQL_METHOD
_request_body = "graphqlRequestBody" # GRAPHQL_REQUEST_BODY
_response_body = "graphqlResponseBody" # GRAPHQL_RESPONSE_BODY
class IssueFilterType(str, Enum):
_selector = "CLICK_SELECTOR"
class RequestGraphqlFilterSchema(BaseModel):
@@ -528,14 +560,50 @@ class RequestGraphqlFilterSchema(BaseModel):
operator: Union[SearchEventOperator, MathOperator] = Field(...)
class IssueFilterSchema(BaseModel):
type: IssueFilterType = Field(...)
value: List[str] = Field(...)
operator: IssueFilterOperator = Field(...)
class _SessionSearchEventRaw(__MixedSearchFilter):
is_event: bool = Field(default=True, const=True)
value: List[str] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...)
operator: SearchEventOperator = Field(...)
source: Optional[List[Union[ErrorSource, int, str]]] = Field(None)
sourceOperator: Optional[MathOperator] = Field(None)
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(None)
operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...)
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
sourceOperator: Optional[MathOperator] = Field(default=None)
filters: Optional[List[Union[RequestGraphqlFilterSchema, IssueFilterSchema]]] = Field(default=None)
@root_validator(pre=True)
def transform(cls, values):
if values.get("type") is None:
return values
values["type"] = {
"CLICK": EventType.click.value,
"INPUT": EventType.input.value,
"LOCATION": EventType.location.value,
"CUSTOM": EventType.custom.value,
"REQUEST": EventType.request.value,
"FETCH": EventType.request_details.value,
"GRAPHQL": EventType.graphql.value,
"STATEACTION": EventType.state_action.value,
"ERROR": EventType.error.value,
"CLICK_IOS": EventType.click_ios.value,
"INPUT_IOS": EventType.input_ios.value,
"VIEW_IOS": EventType.view_ios.value,
"CUSTOM_IOS": EventType.custom_ios.value,
"REQUEST_IOS": EventType.request_ios.value,
"ERROR_IOS": EventType.error_ios.value,
"DOM_COMPLETE": PerformanceEventType.location_dom_complete.value,
"LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.location_largest_contentful_paint_time.value,
"TIME_BETWEEN_EVENTS": PerformanceEventType.time_between_events.value,
"TTFB": PerformanceEventType.location_ttfb.value,
"AVG_CPU_LOAD": PerformanceEventType.location_avg_cpu_load.value,
"AVG_MEMORY_USAGE": PerformanceEventType.location_avg_memory_usage.value,
"FETCH_FAILED": PerformanceEventType.fetch_failed.value,
}.get(values["type"], values["type"])
return values
@root_validator
def event_validator(cls, values):
@@ -548,7 +616,7 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
assert values.get("sourceOperator") is not None, \
"sourceOperator should not be null for PerformanceEventType"
if values["type"] == PerformanceEventType.time_between_events:
assert values["sourceOperator"] != MathOperator._equal.value, \
assert values["sourceOperator"] != MathOperator._equal, \
f"{MathOperator._equal} is not allowed for duration of {PerformanceEventType.time_between_events}"
assert len(values.get("value", [])) == 2, \
f"must provide 2 Events as value for {PerformanceEventType.time_between_events}"
@@ -566,11 +634,14 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
values["source"] = [ErrorSource.js_exception]
elif values.get("type") == EventType.request_details:
assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \
f"filters should be defined for {EventType.request_details.value}"
f"filters should be defined for {EventType.request_details}"
elif values.get("type") == EventType.graphql:
assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \
f"filters should be defined for {EventType.graphql.value}"
f"filters should be defined for {EventType.graphql}"
if isinstance(values.get("operator"), ClickEventExtraOperator):
assert values.get("type") == EventType.click, \
f"operator:{values['operator']} is only available for event-type: {EventType.click}"
return values
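The pre-validators in this schema module share one dict-with-fallback pattern for accepting both the legacy uppercase codes and the new camelCase values; a reduced sketch:
LEGACY = {"CLICK": "click", "FETCH": "fetch", "STATEACTION": "stateAction"}

def normalize(event_type: str) -> str:
    # values not in the map (i.e. already camelCase) pass through unchanged
    return LEGACY.get(event_type, event_type)

assert normalize("CLICK") == "click"  # legacy payload
assert normalize("click") == "click"  # new payload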
@@ -580,11 +651,44 @@ class _SessionSearchEventSchema(_SessionSearchEventRaw):
class SessionSearchFilterSchema(__MixedSearchFilter):
is_event: bool = Field(False, const=False)
value: Union[Optional[Union[IssueType, PlatformType, int, str]],
Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
# TODO: remove this if there is nothing broken from the UI
# value: Union[Optional[Union[IssueType, PlatformType, int, str]],
# Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
value: List[Union[IssueType, PlatformType, int, str]] = Field(default=[])
type: FilterType = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
source: Optional[Union[ErrorSource, str]] = Field(default=None)
filters: List[IssueFilterSchema] = Field(default=[])
@root_validator(pre=True)
def transform(cls, values):
if values.get("type") is None:
return values
values["type"] = {
"USEROS": FilterType.user_os.value,
"USERBROWSER": FilterType.user_browser.value,
"USERDEVICE": FilterType.user_device.value,
"USERCOUNTRY": FilterType.user_country.value,
"USERID": FilterType.user_id.value,
"USERANONYMOUSID": FilterType.user_anonymous_id.value,
"REFERRER": FilterType.referrer.value,
"REVID": FilterType.rev_id.value,
"USEROS_IOS": FilterType.user_os_ios.value,
"USERDEVICE_IOS": FilterType.user_device_ios.value,
"USERCOUNTRY_IOS": FilterType.user_country_ios.value,
"USERID_IOS": FilterType.user_id_ios.value,
"USERANONYMOUSID_IOS": FilterType.user_anonymous_id_ios.value,
"REVID_IOS": FilterType.rev_id_ios.value,
"DURATION": FilterType.duration.value,
"PLATFORM": FilterType.platform.value,
"METADATA": FilterType.metadata.value,
"ISSUE": FilterType.issue.value,
"EVENTS_COUNT": FilterType.events_count.value,
"UTM_SOURCE": FilterType.utm_source.value,
"UTM_MEDIUM": FilterType.utm_medium.value,
"UTM_CAMPAIGN": FilterType.utm_campaign.value
}.get(values["type"], values["type"])
return values
@root_validator
def filter_validator(cls, values):
@@ -632,7 +736,12 @@ class SessionsSearchPayloadSchema(_PaginatedSchema):
@root_validator(pre=True)
def transform_order(cls, values):
if values.get("order") is not None:
if values.get("sort") is None:
values["sort"] = "startTs"
if values.get("order") is None:
values["order"] = SortOrderType.desc
else:
values["order"] = values["order"].upper()
return values
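A sketch of what the transform_order pre-validator above does to an incoming payload ("DESC" stands in for SortOrderType.desc):
def transform_order(values: dict) -> dict:
    if values.get("sort") is None:
        values["sort"] = "startTs"
    if values.get("order") is None:
        values["order"] = "DESC"
    else:
        values["order"] = values["order"].upper()
    return values

assert transform_order({}) == {"sort": "startTs", "order": "DESC"}
assert transform_order({"order": "asc", "sort": "duration"})["order"] == "ASC"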
@@ -698,18 +807,12 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
class FunnelSchema(BaseModel):
name: str = Field(...)
filter: FunnelSearchPayloadSchema = Field([])
is_public: bool = Field(False)
is_public: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
class UpdateFunnelSchema(FunnelSchema):
name: Optional[str] = Field(None)
filter: Optional[FunnelSearchPayloadSchema] = Field(None)
is_public: Optional[bool] = Field(None)
class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
# class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema):
sort: Optional[str] = Field(None)
@@ -739,7 +842,7 @@ class SearchErrorsSchema(FlatSessionsSearchPayloadSchema):
query: Optional[str] = Field(default=None)
class MetricPayloadSchema(BaseModel):
class MetricPayloadSchema(_TimedSchema):
startTimestamp: int = Field(TimeUTC.now(delta_days=-1))
endTimestamp: int = Field(TimeUTC.now())
density: int = Field(7)
@@ -764,19 +867,19 @@ class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CardSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(default=None)
endDate: Optional[int] = Field(default=None)
sort: Optional[str] = Field(default=None)
order: SortOrderType = Field(default=SortOrderType.desc)
group_by_user: Optional[bool] = Field(default=False, const=True)
class CustomMetricCreateSeriesSchema(BaseModel):
class CardCreateSeriesSchema(BaseModel):
series_id: Optional[int] = Field(None)
name: Optional[str] = Field(None)
index: Optional[int] = Field(None)
filter: Optional[CustomMetricSeriesFilterSchema] = Field([])
filter: Optional[CardSeriesFilterSchema] = Field([])
class Config:
alias_generator = attribute_to_camel_case
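These schemas accept camelCase JSON for snake_case fields via alias_generator; a self-contained sketch (to_camel is a stand-in for the codebase's attribute_to_camel_case helper):
from typing import Optional
from pydantic import BaseModel, Field

def to_camel(s: str) -> str:
    head, *rest = s.split("_")
    return head + "".join(w.capitalize() for w in rest)

class Series(BaseModel):  # reduced CardCreateSeriesSchema
    series_id: Optional[int] = Field(None)

    class Config:
        alias_generator = to_camel

assert Series(**{"seriesId": 7}).series_id == 7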
@@ -793,114 +896,229 @@ class MetricTableViewType(str, Enum):
pie_chart = "pieChart"
class MetricOtherViewType(str, Enum):
other_chart = "chart"
list_chart = "list"
class MetricType(str, Enum):
timeseries = "timeseries"
table = "table"
predefined = "predefined"
funnel = "funnel"
errors = "errors"
performance = "performance"
resources = "resources"
web_vital = "webVitals"
pathAnalysis = "pathAnalysis"
retention = "retention"
stickiness = "stickiness"
click_map = "clickMap"
insights = "insights"
class TableMetricOfType(str, Enum):
class MetricOfErrors(str, Enum):
calls_errors = "callsErrors" # calls_errors
domains_errors_4xx = "domainsErrors4xx" # domains_errors_4xx
domains_errors_5xx = "domainsErrors5xx" # domains_errors_5xx
errors_per_domains = "errorsPerDomains" # errors_per_domains
errors_per_type = "errorsPerType" # errors_per_type
impacted_sessions_by_js_errors = "impactedSessionsByJsErrors" # impacted_sessions_by_js_errors
resources_by_party = "resourcesByParty" # resources_by_party
class MetricOfPerformance(str, Enum):
cpu = "cpu" # cpu
crashes = "crashes" # crashes
fps = "fps" # fps
impacted_sessions_by_slow_pages = "impactedSessionsBySlowPages" # impacted_sessions_by_slow_pages
memory_consumption = "memoryConsumption" # memory_consumption
pages_dom_buildtime = "pagesDomBuildtime" # pages_dom_buildtime
pages_response_time = "pagesResponseTime" # pages_response_time
pages_response_time_distribution = "pagesResponseTimeDistribution" # pages_response_time_distribution
resources_vs_visually_complete = "resourcesVsVisuallyComplete" # resources_vs_visually_complete
sessions_per_browser = "sessionsPerBrowser" # sessions_per_browser
slowest_domains = "slowestDomains" # slowest_domains
speed_location = "speedLocation" # speed_location
time_to_render = "timeToRender" # time_to_render
class MetricOfResources(str, Enum):
missing_resources = "missingResources" # missing_resources
resources_count_by_type = "resourcesCountByType" # resources_count_by_type
resources_loading_time = "resourcesLoadingTime" # resources_loading_time
resource_type_vs_response_end = "resourceTypeVsResponseEnd" # resource_type_vs_response_end
slowest_resources = "slowestResources" # slowest_resources
class MetricOfWebVitals(str, Enum):
avg_cpu = "avgCpu" # avg_cpu
avg_dom_content_loaded = "avgDomContentLoaded" # avg_dom_content_loaded
avg_dom_content_load_start = "avgDomContentLoadStart" # avg_dom_content_load_start
avg_first_contentful_pixel = "avgFirstContentfulPixel" # avg_first_contentful_pixel
avg_first_paint = "avgFirstPaint" # avg_first_paint
avg_fps = "avgFps" # avg_fps
avg_image_load_time = "avgImageLoadTime" # avg_image_load_time
avg_page_load_time = "avgPageLoadTime" # avg_page_load_time
avg_pages_dom_buildtime = "avgPagesDomBuildtime" # avg_pages_dom_buildtime
avg_pages_response_time = "avgPagesResponseTime" # avg_pages_response_time
avg_request_load_time = "avgRequestLoadTime" # avg_request_load_time
avg_response_time = "avgResponseTime" # avg_response_time
avg_session_duration = "avgSessionDuration" # avg_session_duration
avg_till_first_byte = "avgTillFirstByte" # avg_till_first_byte
avg_time_to_interactive = "avgTimeToInteractive" # avg_time_to_interactive
avg_time_to_render = "avgTimeToRender" # avg_time_to_render
avg_used_js_heap_size = "avgUsedJsHeapSize" # avg_used_js_heap_size
avg_visited_pages = "avgVisitedPages" # avg_visited_pages
count_requests = "countRequests" # count_requests
count_sessions = "countSessions" # count_sessions
class MetricOfTable(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
user_device = FilterType.user_device.value
user_country = FilterType.user_country.value
user_id = FilterType.user_id.value
issues = FilterType.issue.value
visited_url = EventType.location.value
sessions = "SESSIONS"
errors = IssueType.js_exception.value
visited_url = "location"
sessions = "sessions"
errors = "jsException"
class TimeseriesMetricOfType(str, Enum):
class MetricOfTimeseries(str, Enum):
session_count = "sessionCount"
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
class MetricOfClickMap(str, Enum):
click_map_url = "clickMapUrl"
class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema, _TimedSchema):
startTimestamp: int = Field(TimeUTC.now(-7))
endTimestamp: int = Field(TimeUTC.now())
series: Optional[List[CustomMetricCreateSeriesSchema]] = Field(default=None)
series: List[CardCreateSeriesSchema] = Field(default=[])
class Config:
alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema):
class CardChartSchema(CardSessionsSchema):
density: int = Field(7)
class Config:
alias_generator = attribute_to_camel_case
class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(...)
is_public: bool = Field(default=True)
view_type: Union[MetricTimeseriesViewType, MetricTableViewType] = Field(MetricTimeseriesViewType.line_chart)
metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
metric_value: List[IssueType] = Field([])
metric_format: Optional[MetricFormatType] = Field(None)
# metricFraction: float = Field(None, gt=0, lt=1)
# This is used to handle wrong values sent by the UI
@root_validator(pre=True)
def remove_metric_value(cls, values):
if values.get("metricType") == MetricType.timeseries \
or values.get("metricType") == MetricType.table \
and values.get("metricOf") != TableMetricOfType.issues:
values["metricValue"] = []
return values
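Why the parentheses above matter: 'and' binds tighter than 'or', so the unparenthesized condition already parsed as timeseries or (table and metricOf != issues), which is the intended reading but easy to misread:
# precedence check: 'or' does not group with the first operand of 'and'
assert (True or False and False) is True  # parsed as True or (False and False)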
@root_validator
def validator(cls, values):
if values.get("metric_type") == MetricType.table:
assert isinstance(values.get("view_type"), MetricTableViewType), \
f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table.value}"
assert isinstance(values.get("metric_of"), TableMetricOfType), \
f"metricOf must be of type {TableMetricOfType} for metricType:{MetricType.table.value}"
if values.get("metric_of") != TableMetricOfType.issues:
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{TableMetricOfType.issues.value}"
elif values.get("metric_type") == MetricType.timeseries:
assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \
f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries.value}"
assert isinstance(values.get("metric_of"), TimeseriesMetricOfType), \
f"metricOf must be of type {TimeseriesMetricOfType} for metricType:{MetricType.timeseries.value}"
return values
class Config:
alias_generator = attribute_to_camel_case
class CustomMetricsConfigSchema(BaseModel):
class CardConfigSchema(BaseModel):
col: Optional[int] = Field(...)
row: Optional[int] = Field(default=2)
position: Optional[int] = Field(default=0)
class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
config: CustomMetricsConfigSchema = Field(...)
class CreateCardSchema(CardChartSchema):
name: Optional[str] = Field(...)
is_public: bool = Field(default=True)
view_type: Union[MetricTimeseriesViewType, \
MetricTableViewType, MetricOtherViewType] = Field(...)
metric_type: MetricType = Field(...)
metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \
MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
MetricOfClickMap] = Field(MetricOfTable.user_id)
metric_value: List[IssueType] = Field(default=[])
metric_format: Optional[MetricFormatType] = Field(default=None)
default_config: CardConfigSchema = Field(..., alias="config")
is_template: bool = Field(default=False)
thumbnail: Optional[str] = Field(default=None)
# This is used to handle wrong values sent by the UI
@root_validator(pre=True)
def transform_series(cls, values):
if values.get("series") is not None and len(values["series"]) > 1 and values.get(
"metric_type") == MetricType.funnel.value:
def transform(cls, values):
values["isTemplate"] = values.get("metricType") in [MetricType.errors, MetricType.performance,
MetricType.resources, MetricType.web_vital]
if values.get("metricType") == MetricType.timeseries \
or values.get("metricType") == MetricType.table \
and values.get("metricOf") != MetricOfTable.issues:
values["metricValue"] = []
if values.get("metricType") == MetricType.funnel and \
values.get("series") is not None and len(values["series"]) > 0:
values["series"] = [values["series"][0]]
elif values.get("metricType") not in [MetricType.table,
MetricType.timeseries,
MetricType.insights,
MetricType.click_map,
MetricType.funnel] \
and values.get("series") is not None and len(values["series"]) > 0:
values["series"] = []
return values
@root_validator
def restrictions(cls, values):
assert values.get("metric_type") != MetricType.insights, f"metricType:{MetricType.insights} not supported yet"
return values
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
@root_validator
def validator(cls, values):
if values.get("metric_type") == MetricType.timeseries:
assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \
f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries}"
assert isinstance(values.get("metric_of"), MetricOfTimeseries), \
f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.timeseries}"
elif values.get("metric_type") == MetricType.table:
assert isinstance(values.get("view_type"), MetricTableViewType), \
f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table}"
assert isinstance(values.get("metric_of"), MetricOfTable), \
f"metricOf must be of type {MetricOfTable} for metricType:{MetricType.table}"
if values.get("metric_of") in (MetricOfTable.sessions, MetricOfTable.errors):
assert values.get("view_type") == MetricTableViewType.table, \
f"viewType must be '{MetricTableViewType.table}' for metricOf:{values['metric_of']}"
if values.get("metric_of") != MetricOfTable.issues:
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{MetricOfTable.issues}"
elif values.get("metric_type") == MetricType.funnel:
pass
# allow UI to send empty series for funnel
# assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}"
# ignore this for now; let the UI send whatever it wants for metric_of
# assert isinstance(values.get("metric_of"), MetricOfTimeseries), \
# f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.funnel}"
else:
if values.get("metric_type") == MetricType.errors:
assert isinstance(values.get("metric_of"), MetricOfErrors), \
f"metricOf must be of type {MetricOfErrors} for metricType:{MetricType.errors}"
elif values.get("metric_type") == MetricType.performance:
assert isinstance(values.get("metric_of"), MetricOfPerformance), \
f"metricOf must be of type {MetricOfPerformance} for metricType:{MetricType.performance}"
elif values.get("metric_type") == MetricType.resources:
assert isinstance(values.get("metric_of"), MetricOfResources), \
f"metricOf must be of type {MetricOfResources} for metricType:{MetricType.resources}"
elif values.get("metric_type") == MetricType.web_vital:
assert isinstance(values.get("metric_of"), MetricOfWebVitals), \
f"metricOf must be of type {MetricOfWebVitals} for metricType:{MetricType.web_vital}"
elif values.get("metric_type") == MetricType.click_map:
assert isinstance(values.get("metric_of"), MetricOfClickMap), \
f"metricOf must be of type {MetricOfClickMap} for metricType:{MetricType.click_map}"
# Allow only LOCATION events for clickMap
for s in values.get("series", []):
for f in s.filter.events:
assert f.type == EventType.location, f"only events of type:{EventType.location} are allowed for metricOf:{MetricType.click_map}"
assert isinstance(values.get("view_type"), MetricOtherViewType), \
f"viewType must be 'chart|list' for metricOf:{values.get('metric_of')}"
return values
class Config:
alias_generator = attribute_to_camel_case
class CardUpdateSeriesSchema(CardCreateSeriesSchema):
series_id: Optional[int] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
class UpdateCardSchema(CreateCardSchema):
series: List[CardUpdateSeriesSchema] = Field(...)
class UpdateCustomMetricsStatusSchema(BaseModel):
@ -941,55 +1159,6 @@ class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema):
alias_generator = attribute_to_camel_case
# these values should match the keys in the metrics table
class TemplatePredefinedKeys(str, Enum):
count_sessions = "count_sessions"
avg_request_load_time = "avg_request_load_time"
avg_page_load_time = "avg_page_load_time"
avg_image_load_time = "avg_image_load_time"
avg_dom_content_load_start = "avg_dom_content_load_start"
avg_first_contentful_pixel = "avg_first_contentful_pixel"
avg_visited_pages = "avg_visited_pages"
avg_session_duration = "avg_session_duration"
avg_pages_dom_buildtime = "avg_pages_dom_buildtime"
avg_pages_response_time = "avg_pages_response_time"
avg_response_time = "avg_response_time"
avg_first_paint = "avg_first_paint"
avg_dom_content_loaded = "avg_dom_content_loaded"
avg_till_first_bit = "avg_till_first_byte"
avg_time_to_interactive = "avg_time_to_interactive"
count_requests = "count_requests"
avg_time_to_render = "avg_time_to_render"
avg_used_js_heap_size = "avg_used_js_heap_size"
avg_cpu = "avg_cpu"
avg_fps = "avg_fps"
impacted_sessions_by_js_errors = "impacted_sessions_by_js_errors"
domains_errors_4xx = "domains_errors_4xx"
domains_errors_5xx = "domains_errors_5xx"
errors_per_domains = "errors_per_domains"
calls_errors = "calls_errors"
errors_by_type = "errors_per_type"
errors_by_origin = "resources_by_party"
speed_index_by_location = "speed_location"
slowest_domains = "slowest_domains"
sessions_per_browser = "sessions_per_browser"
time_to_render = "time_to_render"
impacted_sessions_by_slow_pages = "impacted_sessions_by_slow_pages"
memory_consumption = "memory_consumption"
cpu_load = "cpu"
frame_rate = "fps"
crashes = "crashes"
resources_vs_visually_complete = "resources_vs_visually_complete"
pages_dom_buildtime = "pages_dom_buildtime"
pages_response_time = "pages_response_time"
pages_response_time_distribution = "pages_response_time_distribution"
missing_resources = "missing_resources"
slowest_resources = "slowest_resources"
resources_fetch_time = "resources_loading_time"
resource_type_vs_response_end = "resource_type_vs_response_end"
resources_count_by_type = "resources_count_by_type"
class TemplatePredefinedUnits(str, Enum):
millisecond = "ms"
second = "s"
@ -1000,15 +1169,6 @@ class TemplatePredefinedUnits(str, Enum):
count = "count"
class CustomMetricAndTemplate(BaseModel):
is_template: bool = Field(...)
project_id: Optional[int] = Field(...)
predefined_key: Optional[TemplatePredefinedKeys] = Field(...)
class Config:
alias_generator = attribute_to_camel_case
class LiveFilterType(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
@ -1018,25 +1178,25 @@ class LiveFilterType(str, Enum):
user_anonymous_id = FilterType.user_anonymous_id.value
rev_id = FilterType.rev_id.value
platform = FilterType.platform.value
page_title = "PAGETITLE"
session_id = "SESSIONID"
metadata = "METADATA"
user_UUID = "USERUUID"
tracker_version = "TRACKERVERSION"
user_browser_version = "USERBROWSERVERSION"
user_device_type = "USERDEVICETYPE"
page_title = "pageTitle"
session_id = "sessionId"
metadata = FilterType.metadata.value
user_UUID = "userUuid"
tracker_version = "trackerVersion"
user_browser_version = "userBrowserVersion"
user_device_type = "userDeviceType"
class LiveSessionSearchFilterSchema(BaseModel):
value: Union[List[str], str] = Field(...)
type: LiveFilterType = Field(...)
source: Optional[str] = Field(None)
operator: Literal[SearchEventOperator._is.value,
SearchEventOperator._contains.value] = Field(SearchEventOperator._contains.value)
source: Optional[str] = Field(default=None)
operator: Literal[SearchEventOperator._is, \
SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)
@root_validator
def validator(cls, values):
if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value:
if values.get("type") is not None and values["type"] == LiveFilterType.metadata:
assert values.get("source") is not None, "source should not be null for METADATA type"
assert len(values.get("source")) > 0, "source should not be empty for METADATA type"
return values
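The validator above encodes a conditional requirement: source is optional in general, but becomes mandatory once the filter type is metadata. The same pattern in a self-contained sketch (illustrative names, Pydantic v1 style):

from typing import Optional
from pydantic import BaseModel, root_validator

class FilterSketch(BaseModel):
    type: str
    source: Optional[str] = None

    @root_validator
    def require_source_for_metadata(cls, values):
        # post-validation pass: fields are already parsed here
        if values.get("type") == "METADATA":
            assert values.get("source"), "source should not be null/empty for METADATA type"
        return values

FilterSketch(type="METADATA", source="plan")  # ok
# FilterSketch(type="METADATA")               # raises ValidationError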
@ -1059,8 +1219,8 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
else:
i += 1
for i in values["filters"]:
if i.get("type") == LiveFilterType.platform.value:
i["type"] = LiveFilterType.user_device_type.value
if i.get("type") == LiveFilterType.platform:
i["type"] = LiveFilterType.user_device_type
if values.get("sort") is not None:
if values["sort"].lower() == "startts":
values["sort"] = "TIMESTAMP"
@ -1074,6 +1234,7 @@ class IntegrationType(str, Enum):
github = "GITHUB"
jira = "JIRA"
slack = "SLACK"
ms_teams = "MSTEAMS"
sentry = "SENTRY"
bugsnag = "BUGSNAG"
rollbar = "ROLLBAR"
@ -1121,3 +1282,80 @@ class SessionUpdateNoteSchema(SessionNoteSchema):
break
assert c > 0, "at least 1 value should be provided for update"
return values
class WebhookType(str, Enum):
webhook = "webhook"
slack = "slack"
email = "email"
msteams = "msteams"
class SearchCardsSchema(_PaginatedSchema):
order: SortOrderType = Field(default=SortOrderType.desc)
shared_only: bool = Field(default=False)
mine_only: bool = Field(default=False)
query: Optional[str] = Field(default=None)
class Config:
alias_generator = attribute_to_camel_case
class _ClickMapSearchEventRaw(_SessionSearchEventRaw):
type: Literal[EventType.location] = Field(...)
class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema):
events: Optional[List[_ClickMapSearchEventRaw]] = Field([])
filters: List[Union[SessionSearchFilterSchema, _ClickMapSearchEventRaw]] = Field([])
@root_validator(pre=True)
def transform(cls, values):
for f in values.get("filters", []):
if f.get("type") == FilterType.duration:
return values
values["filters"] = values.get("filters", [])
values["filters"].append({"value": [5000], "type": FilterType.duration,
"operator": SearchEventOperator._is, "filters": []})
return values
@root_validator()
def flat_to_original(cls, values):
if len(values["events"]) > 0:
return values
n_filters = []
n_events = []
for v in values.get("filters", []):
if isinstance(v, _ClickMapSearchEventRaw):
n_events.append(v)
else:
n_filters.append(v)
values["events"] = n_events
values["filters"] = n_filters
return values
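flat_to_original is what lets the UI send click-map events and plain session filters in a single flat filters array: after field validation has parsed each entry into its concrete type, the validator partitions the list back into the separate events and filters collections the search code expects. The core move in isolation (illustrative types):

class Event: ...
class Filter: ...

mixed = [Filter(), Event(), Filter()]
events = [v for v in mixed if isinstance(v, Event)]
filters = [v for v in mixed if not isinstance(v, Event)]
assert len(events) == 1 and len(filters) == 2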
class IssueAdvancedFilter(BaseModel):
type: IssueFilterType = Field(default=IssueFilterType._selector)
value: List[str] = Field(default=[])
operator: SearchEventOperator = Field(default=SearchEventOperator._is)
class ClickMapFilterSchema(BaseModel):
value: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[])
type: Literal[FilterType.issue] = Field(...)
operator: Literal[SearchEventOperator._is, MathOperator._equal] = Field(...)
# source: Optional[Union[ErrorSource, str]] = Field(default=None)
filters: List[IssueAdvancedFilter] = Field(default=[])
class GetHeatmapPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-30))
endDate: int = Field(TimeUTC.now())
url: str = Field(...)
# issues: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[])
filters: List[ClickMapFilterSchema] = Field(default=[])
click_rage: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
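One caveat about the defaults above: Field(TimeUTC.now(delta_days=-30)) calls TimeUTC.now() once, at class-definition time, so the default 30-day window is frozen when the process starts rather than computed per request. If a per-request default were wanted, Pydantic's default_factory is the usual tool; a sketch with a stand-in clock (now_ms is hypothetical, standing in for TimeUTC.now):

import time
from pydantic import BaseModel, Field

def now_ms() -> int:
    # stand-in for TimeUTC.now()
    return int(time.time() * 1000)

class Frozen(BaseModel):
    start: int = Field(now_ms())  # evaluated once, when the class is defined

class PerRequest(BaseModel):
    start: int = Field(default_factory=now_ms)  # evaluated on every instantiation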
View file
@ -1,4 +1,4 @@
FROM golang:1.18-alpine3.15 AS prepare
FROM golang:1.18-alpine3.17 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
@ -19,11 +19,16 @@ RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic open
FROM alpine AS entrypoint
ARG GIT_SHA
LABEL GIT_SHA=$GIT_SHA
RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
RUN adduser -u 1001 openreplay -D
ARG SERVICE_NAME
ENV TZ=UTC \
FS_ULIMIT=1000 \
GIT_SHA=$GIT_SHA \
FS_ULIMIT=10000 \
FS_DIR=/mnt/efs \
MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \
UAPARSER_FILE=/home/openreplay/regexes.yaml \
@ -71,11 +76,15 @@ ENV TZ=UTC \
BEACON_SIZE_LIMIT=1000000 \
USE_FAILOVER=false \
GROUP_STORAGE_FAILOVER=failover \
TOPIC_STORAGE_FAILOVER=storage-failover
TOPIC_STORAGE_FAILOVER=storage-failover \
SERVICE_NAME=$SERVICE_NAME \
PROFILER_ENABLED=false \
COMPRESSION_TYPE=zstd \
CH_USERNAME="default" \
CH_PASSWORD="" \
CH_DATABASE="default"
ARG SERVICE_NAME
RUN if [ "$SERVICE_NAME" = "http" ]; then \
wget https://raw.githubusercontent.com/ua-parser/uap-core/master/regexes.yaml -O "$UAPARSER_FILE" &&\
wget https://static.openreplay.com/geoip/GeoLite2-Country.mmdb -O "$MAXMINDDB_FILE"; fi
View file
@ -9,7 +9,8 @@
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
set -e
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-$git_sha}
ee="false"
check_prereq() {
which docker || {
@ -22,9 +23,12 @@ check_prereq() {
function build_service() {
image="$1"
echo "BUILDING $image"
docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image .
docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/amd64 --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha .
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1}
docker push ${DOCKER_REPO:-'local'}/$image:${image_tag}
}
[[ $SIGN_IMAGE -eq 1 ]] && {
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image:${image_tag}
}
echo "Build completed for $image"
return
@ -51,7 +55,7 @@ function build_api(){
for image in $(ls cmd);
do
build_service $image
echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}"
echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}"
done
cd ../backend
rm -rf ../${destination}
View file
@ -1,7 +1,6 @@
package main
import (
"context"
"log"
"os"
"os/signal"
@ -12,29 +11,30 @@ import (
"openreplay/backend/internal/assets/cacher"
config "openreplay/backend/internal/config/assets"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/metrics"
assetsMetrics "openreplay/backend/pkg/metrics/assets"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
)
func main() {
metrics := monitoring.New("assets")
m := metrics.New()
m.Register(assetsMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := config.New()
cacher := cacher.NewCacher(cfg, metrics)
totalAssets, err := metrics.RegisterCounter("assets_total")
if err != nil {
log.Printf("can't create assets_total metric: %s", err)
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
cacher := cacher.NewCacher(cfg)
msgHandler := func(msg messages.Message) {
switch m := msg.(type) {
case *messages.AssetCache:
cacher.CacheURL(m.SessionID(), m.URL)
totalAssets.Add(context.Background(), 1)
assetsMetrics.IncreaseProcessesSessions()
// TODO: connect to "raw" topic in order to listen for JSException
case *messages.JSException:
sourceList, err := assets.ExtractJSExceptionSources(&m.Payload)
View file
@ -3,8 +3,6 @@ package main
import (
"errors"
"log"
types2 "openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/queue/types"
"os"
"os/signal"
"syscall"
@ -14,25 +12,31 @@ import (
"openreplay/backend/internal/db/datasaver"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
types2 "openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/handlers"
custom2 "openreplay/backend/pkg/handlers/custom"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/metrics"
databaseMetrics "openreplay/backend/pkg/metrics/database"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/sessions"
)
func main() {
metrics := monitoring.New("db")
m := metrics.New()
m.Register(databaseMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := db.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
// Init database
pg := cache.NewPGCache(
postgres.NewConn(cfg.Postgres, cfg.BatchQueueLimit, cfg.BatchSizeLimit, metrics), cfg.ProjectExpirationTimeoutMs)
postgres.NewConn(cfg.Postgres.String(), cfg.BatchQueueLimit, cfg.BatchSizeLimit), cfg.ProjectExpirationTimeoutMs)
defer pg.Close()
// HandlersFabric returns the list of message handlers we want applied to each incoming message.
@ -47,29 +51,20 @@ func main() {
// Create handler's aggregator
builderMap := sessions.NewBuilderMap(handlersFabric)
var producer types.Producer = nil
if cfg.UseQuickwit {
producer = queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(15000)
}
// Init modules
saver := datasaver.New(pg, producer)
saver := datasaver.New(pg, cfg)
saver.InitStats()
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgClickEvent,
messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
messages.MsgJSException, messages.MsgResourceTiming,
messages.MsgRawCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgGraphQL,
messages.MsgCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL,
messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument,
messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming}
// Handler logic
msgHandler := func(msg messages.Message) {
statsLogger.Collect(msg)
// Just save session data into db without additional checks
if err := saver.InsertMessage(msg); err != nil {
if !postgres.IsPkeyViolation(err) {
@ -122,8 +117,8 @@ func main() {
consumer := queue.NewConsumer(
cfg.GroupDB,
[]string{
cfg.TopicRawWeb,
cfg.TopicAnalytics,
cfg.TopicRawWeb, // from tracker
cfg.TopicAnalytics, // from heuristics
},
messages.NewMessageIterator(msgHandler, msgFilter, true),
false,
@ -139,30 +134,34 @@ func main() {
// Send collected batches to db
commitDBUpdates := func() {
start := time.Now()
pg.CommitBatches()
pgDur := time.Now().Sub(start).Milliseconds()
start = time.Now()
// Commit collected batches and bulks of information to PG
pg.Commit()
// Commit collected batches of information to CH
if err := saver.CommitStats(); err != nil {
log.Printf("Error on stats commit: %v", err)
}
chDur := time.Now().Sub(start).Milliseconds()
log.Printf("commit duration(ms), pg: %d, ch: %d", pgDur, chDur)
// Commit current position in queue
if err := consumer.Commit(); err != nil {
log.Printf("Error on consumer commit: %v", err)
}
}
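The ordering inside commitDBUpdates is the point: batches and stats are flushed to Postgres and ClickHouse before the consumer offset is committed, so a crash between the two steps replays messages (at-least-once) instead of silently dropping them. The same contract, sketched in Python with illustrative stand-ins for pg, saver and consumer:

def commit_cycle(pg, saver, consumer):
    pg.commit()           # flush buffered batches/bulks first (pg.Commit above)
    saver.commit_stats()  # then the stats destined for ClickHouse
    consumer.commit()     # only now advance the queue offset -- always last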
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %s: terminating\n", sig.String())
commitDBUpdates()
if err := pg.Close(); err != nil {
log.Printf("db.Close error: %s", err)
}
if err := saver.Close(); err != nil {
log.Printf("saver.Close error: %s", err)
}
consumer.Close()
os.Exit(0)
case <-commitTick:
commitDBUpdates()
builderMap.ClearOldSessions()
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
View file
@ -2,32 +2,42 @@ package main
import (
"log"
"openreplay/backend/internal/storage"
"os"
"os/signal"
"strings"
"syscall"
"time"
"openreplay/backend/internal/config/ender"
"openreplay/backend/internal/sessionender"
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/intervals"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/metrics"
databaseMetrics "openreplay/backend/pkg/metrics/database"
enderMetrics "openreplay/backend/pkg/metrics/ender"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
)
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
metrics := monitoring.New("ender")
cfg := ender.New()
m := metrics.New()
m.Register(enderMetrics.List())
m.Register(databaseMetrics.List())
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), cfg.ProjectExpirationTimeoutMs)
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := ender.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), cfg.ProjectExpirationTimeoutMs)
defer pg.Close()
sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber, logger.NewQueueStats(cfg.LoggerTimeout))
sessions, err := sessionender.New(intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber)
if err != nil {
log.Printf("can't init ender service: %s", err)
return
@ -37,7 +47,7 @@ func main() {
consumer := queue.NewConsumer(
cfg.GroupEnder,
[]string{cfg.TopicRawWeb},
messages.NewMessageIterator(
messages.NewEnderMessageIterator(
func(msg messages.Message) { sessions.UpdateSession(msg) },
[]int{messages.MsgTimestamp},
false),
@ -62,6 +72,9 @@ func main() {
consumer.Close()
os.Exit(0)
case <-tick:
failedSessionEnds := make(map[uint64]int64)
duplicatedSessionEnds := make(map[uint64]uint64)
// Find ended sessions and send notifications to other services
sessions.HandleEndedSessions(func(sessionID uint64, timestamp int64) bool {
msg := &messages.SessionEnd{Timestamp: uint64(timestamp)}
@ -71,12 +84,17 @@ func main() {
}
newDuration, err := pg.InsertSessionEnd(sessionID, msg.Timestamp)
if err != nil {
if strings.Contains(err.Error(), "integer out of range") {
// Skip session with broken duration
failedSessionEnds[sessionID] = timestamp
return true
}
log.Printf("can't save sessionEnd to database, sessID: %d, err: %s", sessionID, err)
return false
}
if currDuration == newDuration {
log.Printf("sessionEnd duplicate, sessID: %d, prevDur: %d, newDur: %d", sessionID,
currDuration, newDuration)
// Skip session end duplicate
duplicatedSessionEnds[sessionID] = currDuration
return true
}
if cfg.UseEncryption {
@ -94,6 +112,12 @@ func main() {
}
return true
})
if len(failedSessionEnds) > 0 {
log.Println("sessions with wrong duration:", failedSessionEnds)
}
if len(duplicatedSessionEnds) > 0 {
log.Println("session end duplicates:", duplicatedSessionEnds)
}
producer.Flush(cfg.ProducerTimeout)
if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil {
log.Printf("can't commit messages with offset: %s", err)
View file
@ -2,6 +2,7 @@ package main
import (
"log"
"openreplay/backend/pkg/pprof"
"os"
"os/signal"
"syscall"
@ -11,7 +12,6 @@ import (
"openreplay/backend/pkg/handlers"
web2 "openreplay/backend/pkg/handlers/web"
"openreplay/backend/pkg/intervals"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/sessions"
@ -20,8 +20,10 @@ import (
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
// Load service configuration
cfg := heuristics.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
// HandlersFabric returns the list of message handlers we want applied to each incoming message.
handlersFabric := func() []handlers.MessageProcessor {
@ -41,14 +43,10 @@ func main() {
// Create handler's aggregator
builderMap := sessions.NewBuilderMap(handlersFabric)
// Init logger
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
// Init producer and consumer for data bus
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
msgHandler := func(msg messages.Message) {
statsLogger.Collect(msg)
builderMap.HandleMessage(msg)
}
View file
@ -2,40 +2,48 @@ package main
import (
"log"
"openreplay/backend/internal/config/http"
"openreplay/backend/internal/http/router"
"openreplay/backend/internal/http/server"
"openreplay/backend/internal/http/services"
"openreplay/backend/pkg/monitoring"
"os"
"os/signal"
"syscall"
"openreplay/backend/internal/config/http"
"openreplay/backend/internal/http/router"
"openreplay/backend/internal/http/server"
"openreplay/backend/internal/http/services"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/metrics"
databaseMetrics "openreplay/backend/pkg/metrics/database"
httpMetrics "openreplay/backend/pkg/metrics/http"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
)
func main() {
metrics := monitoring.New("http")
m := metrics.New()
m.Register(httpMetrics.List())
m.Register(databaseMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := http.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
// Connect to queue
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(15000)
// Connect to database
dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), 1000*60*20)
dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres.String(), 0, 0), 1000*60*20)
defer dbConn.Close()
// Build all services
services := services.New(cfg, producer, dbConn)
// Init server's routes
router, err := router.NewRouter(cfg, services, metrics)
router, err := router.NewRouter(cfg, services)
if err != nil {
log.Fatalf("failed while creating engine: %s", err)
View file

@ -2,29 +2,34 @@ package main
import (
"log"
config "openreplay/backend/internal/config/integrations"
"openreplay/backend/internal/integrations/clientManager"
"openreplay/backend/pkg/monitoring"
"time"
"os"
"os/signal"
"syscall"
"time"
config "openreplay/backend/internal/config/integrations"
"openreplay/backend/internal/integrations/clientManager"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/intervals"
"openreplay/backend/pkg/metrics"
databaseMetrics "openreplay/backend/pkg/metrics/database"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/token"
)
func main() {
metrics := monitoring.New("integrations")
m := metrics.New()
m.Register(databaseMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := config.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
pg := postgres.NewConn(cfg.PostgresURI, 0, 0, metrics)
pg := postgres.NewConn(cfg.Postgres.String(), 0, 0)
defer pg.Close()
tokenizer := token.NewTokenizer(cfg.TokenSecret)
@ -47,7 +52,7 @@ func main() {
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(15000)
listener, err := postgres.NewIntegrationsListener(cfg.PostgresURI)
listener, err := postgres.NewIntegrationsListener(cfg.Postgres.String())
if err != nil {
log.Printf("Postgres listener error: %v\n", err)
log.Fatalf("Postgres listener error")
View file
@ -1,7 +1,8 @@
package main
import (
"context"
"bytes"
"encoding/binary"
"log"
"os"
"os/signal"
@ -13,17 +14,22 @@ import (
"openreplay/backend/internal/sink/sessionwriter"
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/metrics"
sinkMetrics "openreplay/backend/pkg/metrics/sink"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/url/assets"
)
func main() {
metrics := monitoring.New("sink")
m := metrics.New()
m.Register(sinkMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := sink.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
if _, err := os.Stat(cfg.FsDir); os.IsNotExist(err) {
log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err)
@ -34,26 +40,43 @@ func main() {
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(cfg.ProducerCloseTimeout)
rewriter := assets.NewRewriter(cfg.AssetsOrigin)
assetMessageHandler := assetscache.New(cfg, rewriter, producer, metrics)
assetMessageHandler := assetscache.New(cfg, rewriter, producer)
counter := storage.NewLogCounter()
// Session message metrics
totalMessages, err := metrics.RegisterCounter("messages_total")
if err != nil {
log.Printf("can't create messages_total metric: %s", err)
}
savedMessages, err := metrics.RegisterCounter("messages_saved")
if err != nil {
log.Printf("can't create messages_saved metric: %s", err)
}
messageSize, err := metrics.RegisterHistogram("messages_size")
if err != nil {
log.Printf("can't create messages_size metric: %s", err)
}
var (
sessionID uint64
messageIndex = make([]byte, 8)
domBuffer = bytes.NewBuffer(make([]byte, 1024))
devBuffer = bytes.NewBuffer(make([]byte, 1024))
)
// Reset buffers
domBuffer.Reset()
devBuffer.Reset()
msgHandler := func(msg messages.Message) {
// [METRICS] Increase the number of processed messages
totalMessages.Add(context.Background(), 1)
// Check batchEnd signal (nil message)
if msg == nil {
// Skip empty buffers
if domBuffer.Len() <= 0 && devBuffer.Len() <= 0 {
return
}
sinkMetrics.RecordWrittenBytes(float64(domBuffer.Len()), "dom")
sinkMetrics.RecordWrittenBytes(float64(devBuffer.Len()), "devtools")
// Write buffered batches to the session
if err := writer.Write(sessionID, domBuffer.Bytes(), devBuffer.Bytes()); err != nil {
log.Printf("writer error: %s", err)
}
// Prepare buffer for the next batch
domBuffer.Reset()
devBuffer.Reset()
sessionID = 0
return
}
sinkMetrics.IncreaseTotalMessages()
// Send SessionEnd trigger to storage service
if msg.TypeID() == messages.MsgSessionEnd {
@ -98,15 +121,61 @@ func main() {
return
}
// Write message to file
if err := writer.Write(msg); err != nil {
log.Printf("writer error: %s", err)
return
// Write message to the batch buffer
if sessionID == 0 {
sessionID = msg.SessionID()
}
// [METRICS] Count messages written to file and record the message size
messageSize.Record(context.Background(), float64(len(msg.Encode())))
savedMessages.Add(context.Background(), 1)
// Encode message index
binary.LittleEndian.PutUint64(messageIndex, msg.Meta().Index)
var (
n int
err error
)
// Add message to dom buffer
if messages.IsDOMType(msg.TypeID()) {
// Write message index
n, err = domBuffer.Write(messageIndex)
if err != nil {
log.Printf("domBuffer index write err: %s", err)
}
if n != len(messageIndex) {
log.Printf("domBuffer index not full write: %d/%d", n, len(messageIndex))
}
// Write message body
n, err = domBuffer.Write(msg.Encode())
if err != nil {
log.Printf("domBuffer message write err: %s", err)
}
if n != len(msg.Encode()) {
log.Printf("domBuffer message not full write: %d/%d", n, len(messageIndex))
}
}
// Add message to dev buffer
if !messages.IsDOMType(msg.TypeID()) || msg.TypeID() == messages.MsgTimestamp {
// Write message index
n, err = devBuffer.Write(messageIndex)
if err != nil {
log.Printf("devBuffer index write err: %s", err)
}
if n != len(messageIndex) {
log.Printf("devBuffer index not full write: %d/%d", n, len(messageIndex))
}
// Write message body
n, err = devBuffer.Write(msg.Encode())
if err != nil {
log.Printf("devBuffer message write err: %s", err)
}
if n != len(msg.Encode()) {
log.Printf("devBuffer message not full write: %d/%d", n, len(messageIndex))
}
}
sinkMetrics.IncreaseWrittenMessages()
sinkMetrics.RecordMessageSize(float64(len(msg.Encode())))
}
consumer := queue.NewConsumer(
@ -114,7 +183,7 @@ func main() {
[]string{
cfg.TopicRawWeb,
},
messages.NewMessageIterator(msgHandler, nil, false),
messages.NewSinkMessageIterator(msgHandler, nil, false),
false,
cfg.MessageSizeLimit,
)
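The batching above replaces per-message writes with two framed buffers: each entry is the message's 8-byte little-endian index followed by its encoded body, and a nil message (the batch-end signal) flushes both buffers to the session writer at once. The frame layout, mirrored in a short Python sketch:

import struct

def frame(index: int, body: bytes) -> bytes:
    # 8-byte little-endian message index, then the encoded message --
    # the same layout domBuffer/devBuffer accumulate above
    return struct.pack("<Q", index) + body

batch = frame(0, b"dom-msg") + frame(1, b"next")
assert batch[:8] == (0).to_bytes(8, "little")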
View file
@ -11,20 +11,26 @@ import (
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/failover"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/metrics"
storageMetrics "openreplay/backend/pkg/metrics/storage"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
s3storage "openreplay/backend/pkg/storage"
cloud "openreplay/backend/pkg/storage"
)
func main() {
metrics := monitoring.New("storage")
m := metrics.New()
m.Register(storageMetrics.List())
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := config.New()
if cfg.UseProfiler {
pprof.StartProfilingServer()
}
s3 := s3storage.NewS3(cfg.S3Region, cfg.S3Bucket)
srv, err := storage.New(cfg, s3, metrics)
s3 := cloud.NewS3(cfg.S3Region, cfg.S3Bucket)
srv, err := storage.New(cfg, s3)
if err != nil {
log.Printf("can't init storage service: %s", err)
return
@ -44,8 +50,8 @@ func main() {
messages.NewMessageIterator(
func(msg messages.Message) {
sesEnd := msg.(*messages.SessionEnd)
if err := srv.UploadSessionFiles(sesEnd); err != nil {
log.Printf("can't find session: %d", msg.SessionID())
if err := srv.Upload(sesEnd); err != nil {
log.Printf("upload session err: %s, sessID: %d", err, msg.SessionID())
sessionFinder.Find(msg.SessionID(), sesEnd.Timestamp)
}
// Log timestamp of last processed session
@ -54,7 +60,7 @@ func main() {
[]int{messages.MsgSessionEnd},
true,
),
true,
false,
cfg.MessageSizeLimit,
)
@ -69,10 +75,15 @@ func main() {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
sessionFinder.Stop()
srv.Wait()
consumer.Close()
os.Exit(0)
case <-counterTick:
go counter.Print()
srv.Wait()
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
View file
@ -8,7 +8,7 @@ require (
github.com/Masterminds/semver v1.5.0
github.com/aws/aws-sdk-go v1.44.98
github.com/btcsuite/btcutil v1.0.2
github.com/confluentinc/confluent-kafka-go v1.8.2
github.com/confluentinc/confluent-kafka-go v1.9.2
github.com/elastic/go-elasticsearch/v7 v7.13.1
github.com/go-redis/redis v6.15.9+incompatible
github.com/google/uuid v1.3.0
@ -20,14 +20,11 @@ require (
github.com/klauspost/pgzip v1.2.5
github.com/oschwald/maxminddb-golang v1.7.0
github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.12.1
github.com/sethvargo/go-envconfig v0.7.0
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe
go.opentelemetry.io/otel v1.7.0
go.opentelemetry.io/otel/exporters/prometheus v0.30.0
go.opentelemetry.io/otel/metric v0.30.0
go.opentelemetry.io/otel/sdk/metric v0.30.0
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c
golang.org/x/net v0.1.1-0.20221104162952-702349b0e862
google.golang.org/api v0.81.0
)
@ -38,8 +35,6 @@ require (
cloud.google.com/go/storage v1.14.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/go-logr/logr v1.2.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-cmp v0.5.8 // indirect
@ -55,20 +50,19 @@ require (
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/paulmach/orb v0.7.1 // indirect
github.com/pierrec/lz4/v4 v4.1.15 // indirect
github.com/prometheus/client_golang v1.12.1 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/stretchr/testify v1.8.0 // indirect
go.opencensus.io v0.23.0 // indirect
go.opentelemetry.io/otel/sdk v1.7.0 // indirect
go.opentelemetry.io/otel v1.7.0 // indirect
go.opentelemetry.io/otel/trace v1.7.0 // indirect
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect
golang.org/x/text v0.4.0 // indirect
golang.org/x/sys v0.1.0 // indirect
golang.org/x/text v0.7.0 // indirect
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect
View file
@ -68,6 +68,9 @@ github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3Q
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/actgardner/gogen-avro/v10 v10.1.0/go.mod h1:o+ybmVjEa27AAr35FRqU98DJu1fXES56uXniYFv4yDA=
github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ=
github.com/actgardner/gogen-avro/v9 v9.1.0/go.mod h1:nyTj6wPqDJoxM3qdnjcLv+EnMDSDFqE0qDpva2QRmKc=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@ -77,8 +80,6 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo=
github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
@ -115,11 +116,12 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/confluentinc/confluent-kafka-go v1.8.2 h1:PBdbvYpyOdFLehj8j+9ba7FL4c4Moxn79gy9cYKxG5E=
github.com/confluentinc/confluent-kafka-go v1.8.2/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
github.com/confluentinc/confluent-kafka-go v1.9.2 h1:gV/GxhMBUb03tFWkN+7kdhg+zf+QUM+wVkI9zwh770Q=
github.com/confluentinc/confluent-kafka-go v1.9.2/go.mod h1:ptXNqsuDfYbAE/LBW6pnwWZElUoWxHoV8E43DCrliyo=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -136,6 +138,10 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.m
github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/frankban/quicktest v1.2.2/go.mod h1:Qh/WofXFeiAFII1aEBu529AtJo6Zg2VHscnEsbBnJ20=
github.com/frankban/quicktest v1.7.2/go.mod h1:jaStnuzAqU1AJdCO0l53JDCJrVDKcS03DbaAcR7Ks/o=
github.com/frankban/quicktest v1.10.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y=
github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
@ -148,9 +154,7 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
@ -195,10 +199,13 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.2.1-0.20190312032427-6f77996f0c42/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
@ -233,6 +240,7 @@ github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
@ -250,12 +258,17 @@ github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hamba/avro v1.5.6/go.mod h1:3vNT0RLXXpFm2Tb/5KC71ZRJlOroggq1Rcitb6k4Fr8=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/heetch/avro v0.3.1/go.mod h1:4xn38Oz/+hiEUTpbVfGVLfvOg0yKLlRP7Q9+gJJILgA=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
github.com/invopop/jsonschema v0.4.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
@ -301,6 +314,11 @@ github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dv
github.com/jackc/puddle v1.2.2-0.20220404125616-4e959849469a h1:oH7y/b+q2BEerCnARr/HZc1NxOYbKSJor4MqQXlhh+s=
github.com/jackc/puddle v1.2.2-0.20220404125616-4e959849469a/go.mod h1:ZQuO1Un86Xpe1ShKl08ERTzYhzWq+OvrvotbpeE3XO0=
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jhump/gopoet v0.0.0-20190322174617-17282ff210b3/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI=
github.com/jhump/gopoet v0.1.0/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI=
github.com/jhump/goprotoc v0.5.0/go.mod h1:VrbvcYrQOrTi3i0Vf+m+oqQWk9l72mjkJCYo7UvLHRQ=
github.com/jhump/protoreflect v1.11.0/go.mod h1:U7aMIjN0NWq9swDP7xDdoMfRHb35uiuTd3Z9nFXJf5E=
github.com/jhump/protoreflect v1.12.0/go.mod h1:JytZfP5d0r8pVNLZvai7U/MCuTWITgrI4tTg7puQFKI=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
@ -314,6 +332,7 @@ github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/juju/qthttptest v0.1.1/go.mod h1:aTlAv8TYaflIiTDIQYzxnl1QdPjAg8Q8qJMErpKy6A4=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
@ -327,16 +346,23 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxv
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/linkedin/goavro v2.1.0+incompatible/go.mod h1:bBCwI2eGYpUI/4820s67MElg9tdeLbINjLjiM2xZFYM=
github.com/linkedin/goavro/v2 v2.10.0/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA=
github.com/linkedin/goavro/v2 v2.10.1/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA=
github.com/linkedin/goavro/v2 v2.11.1/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
@ -354,6 +380,7 @@ github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3Rllmb
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nrwiersma/avro-benchmarks v0.0.0-20210913175520-21aec48c8f76/go.mod h1:iKyFMidsk/sVYONJRE372sJuX/QTRPacU7imPqqsu7g=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
@ -367,6 +394,7 @@ github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKf
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0=
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@ -395,11 +423,16 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/rogpeppe/clock v0.0.0-20190514195947-2896927a307a/go.mod h1:4r5QyqhjIWCcK8DO4KMclc5Iknq5qVBAlbYYzAbUScQ=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
github.com/santhosh-tekuri/jsonschema/v5 v5.0.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sethvargo/go-envconfig v0.7.0 h1:P/ljQXSRjgAgsnIripHs53Jg/uNVXu2FYQ9yLSDappA=
github.com/sethvargo/go-envconfig v0.7.0/go.mod h1:00S1FAhRUuTNJazWBWcJGvEHOM+NO6DhoRMAOX7FY5o=
@ -420,6 +453,7 @@ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoH
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@ -451,14 +485,6 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM=
go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk=
go.opentelemetry.io/otel/exporters/prometheus v0.30.0 h1:YXo5ZY5nofaEYMCMTTMaRH2cLDZB8+0UGuk5RwMfIo0=
go.opentelemetry.io/otel/exporters/prometheus v0.30.0/go.mod h1:qN5feW+0/d661KDtJuATEmHtw5bKBK7NSvNEP927zSs=
go.opentelemetry.io/otel/metric v0.30.0 h1:Hs8eQZ8aQgs0U49diZoaS6Uaxw3+bBE3lcMUKBFIk3c=
go.opentelemetry.io/otel/metric v0.30.0/go.mod h1:/ShZ7+TS4dHzDFmfi1kSXMhMVubNoP0oIaBp70J6UXU=
go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0=
go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU=
go.opentelemetry.io/otel/sdk/metric v0.30.0 h1:XTqQ4y3erR2Oj8xSAOL5ovO5011ch2ELg51z4fVkpME=
go.opentelemetry.io/otel/sdk/metric v0.30.0/go.mod h1:8AKFRi5HyvTR0RRty3paN1aMC9HMT+NzcEhw/BLkLX8=
go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o=
go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
@ -539,6 +565,7 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200505041828-1ed23360d12c/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
@ -562,8 +589,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c h1:yKufUcDwucU5urd+50/Opbt4AYpqthk7wHpHok8f1lo=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/net v0.1.1-0.20221104162952-702349b0e862 h1:KrLJ+iz8J6j6VVr/OCfULAcK+xozUmWE43fKpMR4MlI=
golang.org/x/net v0.1.1-0.20221104162952-702349b0e862/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -651,7 +678,6 @@ golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -663,6 +689,7 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -675,8 +702,8 @@ golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -688,8 +715,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -729,6 +756,7 @@ golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjs
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
@@ -882,6 +910,7 @@ google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX
google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220503193339-ba3ae3f07e29/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd h1:e0TwkXOdbnH/1x5rc5MZ/VYyiZ4v+RdVfrGMqEwT68I=
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
@@ -933,14 +962,19 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/avro.v0 v0.0.0-20171217001914-a730b5802183/go.mod h1:FvqrFXt+jCsyQibeRv4xxEJBL5iG2DDW5aeJwzDiq4A=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v1 v1.0.0/go.mod h1:CxwszS/Xz1C49Ucd2i6Zil5UToP1EmyrFhKaMVbg1mk=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/httprequest.v1 v1.2.1/go.mod h1:x2Otw96yda5+8+6ZeWwHIJTFkEHWP/qP8pJOzqEtWPM=
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
gopkg.in/retry.v1 v1.0.3/go.mod h1:FJkXmWiMaAo7xB+xhvDF59zhfjDWyzmyAxiT4dB688g=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -948,11 +982,13 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View file

@@ -1,16 +1,13 @@
 package cacher
 
 import (
-	"context"
 	"crypto/tls"
 	"fmt"
-	"go.opentelemetry.io/otel/metric/instrument/syncfloat64"
 	"io"
 	"io/ioutil"
-	"log"
 	"mime"
 	"net/http"
-	"openreplay/backend/pkg/monitoring"
+	metrics "openreplay/backend/pkg/metrics/assets"
 	"path/filepath"
 	"strings"
 	"time"
@@ -25,30 +22,22 @@ import (
 const MAX_CACHE_DEPTH = 5
 
 type cacher struct {
-	timeoutMap       *timeoutMap         // Concurrency implemented
-	s3               *storage.S3         // AWS Docs: "These clients are safe to use concurrently."
-	httpClient       *http.Client        // Docs: "Clients are safe for concurrent use by multiple goroutines."
-	rewriter         *assets.Rewriter    // Read only
-	Errors           chan error
-	sizeLimit        int
-	downloadedAssets syncfloat64.Counter
-	requestHeaders   map[string]string
-	workers          *WorkerPool
+	timeoutMap     *timeoutMap       // Concurrency implemented
+	s3             *storage.S3       // AWS Docs: "These clients are safe to use concurrently."
+	httpClient     *http.Client      // Docs: "Clients are safe for concurrent use by multiple goroutines."
+	rewriter       *assets.Rewriter  // Read only
+	Errors         chan error
+	sizeLimit      int
+	requestHeaders map[string]string
+	workers        *WorkerPool
 }
 
 func (c *cacher) CanCache() bool {
 	return c.workers.CanAddTask()
 }
 
-func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher {
+func NewCacher(cfg *config.Config) *cacher {
 	rewriter := assets.NewRewriter(cfg.AssetsOrigin)
-	if metrics == nil {
-		log.Fatalf("metrics are empty")
-	}
-	downloadedAssets, err := metrics.RegisterCounter("assets_downloaded")
-	if err != nil {
-		log.Printf("can't create downloaded_assets metric: %s", err)
-	}
 	c := &cacher{
 		timeoutMap: newTimeoutMap(),
 		s3:         storage.NewS3(cfg.AWSRegion, cfg.S3BucketAssets),
@@ -59,11 +48,10 @@ func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher {
 				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
 			},
 		},
-		rewriter:         rewriter,
-		Errors:           make(chan error),
-		sizeLimit:        cfg.AssetsSizeLimit,
-		downloadedAssets: downloadedAssets,
-		requestHeaders:   cfg.AssetsRequestHeaders,
+		rewriter:       rewriter,
+		Errors:         make(chan error),
+		sizeLimit:      cfg.AssetsSizeLimit,
+		requestHeaders: cfg.AssetsRequestHeaders,
 	}
 	c.workers = NewPool(64, c.CacheFile)
 	return c
@@ -75,6 +63,7 @@ func (c *cacher) CacheFile(task *Task) {
 
 func (c *cacher) cacheURL(t *Task) {
 	t.retries--
+	start := time.Now()
 	req, _ := http.NewRequest("GET", t.requestURL, nil)
 	if t.retries%2 == 0 {
 		req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0")
@@ -87,11 +76,12 @@ func (c *cacher) cacheURL(t *Task) {
 		c.Errors <- errors.Wrap(err, t.urlContext)
 		return
 	}
+	metrics.RecordDownloadDuration(float64(time.Now().Sub(start).Milliseconds()), res.StatusCode)
 	defer res.Body.Close()
 	if res.StatusCode >= 400 {
 		printErr := true
-		// Retry 403 error
-		if res.StatusCode == 403 && t.retries > 0 {
+		// Retry 403/503 errors
+		if (res.StatusCode == 403 || res.StatusCode == 503) && t.retries > 0 {
 			c.workers.AddTask(t)
 			printErr = false
 		}
@@ -122,12 +112,15 @@ func (c *cacher) cacheURL(t *Task) {
 	}
 
 	// TODO: implement in streams
+	start = time.Now()
 	err = c.s3.Upload(strings.NewReader(strData), t.cachePath, contentType, false)
 	if err != nil {
+		metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), true)
 		c.Errors <- errors.Wrap(err, t.urlContext)
 		return
 	}
-	c.downloadedAssets.Add(context.Background(), 1)
+	metrics.RecordUploadDuration(float64(time.Now().Sub(start).Milliseconds()), false)
+	metrics.IncreaseSavedSessions()
 
 	if isCSS {
 		if t.depth > 0 {
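
The retry branch above now requeues a failed download on 503 as well as 403, so transient bot-blocking and temporary overload responses get another attempt, while every other failure is reported through the Errors channel. A minimal standalone sketch of just that decision follows; the helper name is invented for illustration and is not part of the OpenReplay codebase.

package main

import "fmt"

// shouldRequeue mirrors the retry condition in cacheURL: put the task back
// on the worker pool only for 403/503 responses, and only while it still
// has retries left. (Illustrative helper, not OpenReplay's API.)
func shouldRequeue(statusCode, retriesLeft int) bool {
	return (statusCode == 403 || statusCode == 503) && retriesLeft > 0
}

func main() {
	fmt.Println(shouldRequeue(403, 2)) // true: retried, possibly with a different User-Agent
	fmt.Println(shouldRequeue(503, 0)) // false: retries exhausted, error is reported
	fmt.Println(shouldRequeue(500, 2)) // false: other 4xx/5xx codes are never requeued
}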

View file

@@ -14,6 +14,7 @@ type Config struct {
 	AssetsOrigin         string            `env:"ASSETS_ORIGIN,required"`
 	AssetsSizeLimit      int               `env:"ASSETS_SIZE_LIMIT,required"`
 	AssetsRequestHeaders map[string]string `env:"ASSETS_REQUEST_HEADERS"`
+	UseProfiler          bool              `env:"PROFILER_ENABLED,default=false"`
 }
 
 func New() *Config {

View file

@@ -1,5 +1,7 @@
 package common
 
+import "strings"
+
 type Config struct {
 	ConfigFilePath   string `env:"CONFIG_FILE_PATH"`
 	MessageSizeLimit int    `env:"QUEUE_MESSAGE_SIZE_LIMIT,default=1048576"`
@@ -12,3 +14,21 @@ type Configer interface {
 func (c *Config) GetConfigPath() string {
 	return c.ConfigFilePath
 }
+
+type Postgres struct {
+	Postgres        string `env:"POSTGRES_STRING,required"`
+	ApplicationName string `env:"SERVICE_NAME,default='worker'"`
+}
+
+func (cfg *Postgres) String() string {
+	str := cfg.Postgres
+	if !strings.Contains(cfg.Postgres, "application_name") {
+		if strings.Contains(cfg.Postgres, "?") {
+			str += "&"
+		} else {
+			str += "?"
+		}
+		str += "application_name=" + cfg.ApplicationName
+	}
+	return str
+}
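
The new common.Postgres type appends an application_name parameter to the DSN (choosing "?" or "&" depending on whether a query string already exists) unless one is already set, so each worker identifies itself to Postgres, e.g. in pg_stat_activity. A self-contained sketch of that behavior follows; the env tags are dropped, and the connection strings and service names are made up.

package main

import (
	"fmt"
	"strings"
)

// Postgres mirrors the struct introduced above, with values hard-coded
// so the example runs standalone.
type Postgres struct {
	Postgres        string
	ApplicationName string
}

func (cfg *Postgres) String() string {
	str := cfg.Postgres
	if !strings.Contains(cfg.Postgres, "application_name") {
		if strings.Contains(cfg.Postgres, "?") {
			str += "&"
		} else {
			str += "?"
		}
		str += "application_name=" + cfg.ApplicationName
	}
	return str
}

func main() {
	// No query string yet, so "?" starts one.
	a := Postgres{Postgres: "postgres://user:pass@db:5432/app", ApplicationName: "ender"}
	fmt.Println(a.String()) // postgres://user:pass@db:5432/app?application_name=ender

	// Existing query string, so "&" extends it.
	b := Postgres{Postgres: "postgres://user:pass@db:5432/app?sslmode=disable", ApplicationName: "db"}
	fmt.Println(b.String()) // postgres://user:pass@db:5432/app?sslmode=disable&application_name=db
}

Embedding common.Postgres, as the db, ender, and http service configs do in the diffs below, centralizes this logic instead of repeating a raw POSTGRES_STRING field in every service.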

View file

@@ -8,7 +8,7 @@ import (
 
 type Config struct {
 	common.Config
-	Postgres                   string `env:"POSTGRES_STRING,required"`
+	common.Postgres
 	ProjectExpirationTimeoutMs int64  `env:"PROJECT_EXPIRATION_TIMEOUT_MS,default=1200000"`
 	LoggerTimeout              int    `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
 	GroupDB                    string `env:"GROUP_DB,required"`
@@ -18,6 +18,8 @@ type Config struct {
 	BatchQueueLimit int    `env:"DB_BATCH_QUEUE_LIMIT,required"`
 	BatchSizeLimit  int    `env:"DB_BATCH_SIZE_LIMIT,required"`
 	UseQuickwit     bool   `env:"QUICKWIT_ENABLED,default=false"`
+	QuickwitTopic   string `env:"QUICKWIT_TOPIC,default=saas-quickwit"`
+	UseProfiler     bool   `env:"PROFILER_ENABLED,default=false"`
 }
 
 func New() *Config {

View file

@@ -7,7 +7,7 @@ import (
 
 type Config struct {
 	common.Config
-	Postgres                   string `env:"POSTGRES_STRING,required"`
+	common.Postgres
 	ProjectExpirationTimeoutMs int64  `env:"PROJECT_EXPIRATION_TIMEOUT_MS,default=1200000"`
 	GroupEnder                 string `env:"GROUP_ENDER,required"`
 	LoggerTimeout              int    `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
@@ -15,6 +15,7 @@ type Config struct {
 	ProducerTimeout  int  `env:"PRODUCER_TIMEOUT,default=2000"`
 	PartitionsNumber int  `env:"PARTITIONS_NUMBER,required"`
 	UseEncryption    bool `env:"USE_ENCRYPTION,default=false"`
+	UseProfiler      bool `env:"PROFILER_ENABLED,default=false"`
 }
 
 func New() *Config {

View file

@@ -13,6 +13,7 @@ type Config struct {
 	TopicRawWeb     string `env:"TOPIC_RAW_WEB,required"`
 	TopicRawIOS     string `env:"TOPIC_RAW_IOS,required"`
 	ProducerTimeout int    `env:"PRODUCER_TIMEOUT,default=2000"`
+	UseProfiler     bool   `env:"PROFILER_ENABLED,default=false"`
 }
 
 func New() *Config {

View file

@@ -9,6 +9,7 @@ import (
 
 type Config struct {
 	common.Config
+	common.Postgres
 	HTTPHost    string        `env:"HTTP_HOST,default="`
 	HTTPPort    string        `env:"HTTP_PORT,required"`
 	HTTPTimeout time.Duration `env:"HTTP_TIMEOUT,default=60s"`
@@ -19,10 +20,10 @@ type Config struct {
 	FileSizeLimit     int64  `env:"FILE_SIZE_LIMIT,default=10000000"`
 	AWSRegion         string `env:"AWS_REGION,required"`
 	S3BucketIOSImages string `env:"S3_BUCKET_IOS_IMAGES,required"`
-	Postgres          string `env:"POSTGRES_STRING,required"`
 	TokenSecret       string `env:"TOKEN_SECRET,required"`
 	UAParserFile      string `env:"UAPARSER_FILE,required"`
 	MaxMinDBFile      string `env:"MAXMINDDB_FILE,required"`
+	UseProfiler       bool   `env:"PROFILER_ENABLED,default=false"`
 	WorkerID          uint16
 }
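
Each of the configs above also gains a UseProfiler flag bound to PROFILER_ENABLED. The diff does not show how the flag is consumed, so the following is only a guess at the common Go pattern (exposing net/http/pprof on a side port when the flag is set), not OpenReplay's actual wiring.

package main

import (
	"log"
	"net/http"
	_ "net/http/pprof" // registers /debug/pprof/* handlers on http.DefaultServeMux
	"os"
)

func main() {
	// Hypothetical wiring for a PROFILER_ENABLED-style flag.
	if os.Getenv("PROFILER_ENABLED") == "true" {
		go func() {
			// Serve the pprof endpoints on a local side port.
			log.Println(http.ListenAndServe("localhost:6060", nil))
		}()
	}

	select {} // stand-in for the service's real work loop
}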

Some files were not shown because too many files have changed in this diff.