Compare commits: main...dependabot (410 commits)
Commits (SHA1, in listed order):

fc256b3274, 13e83fa774, 1fe83a3eac, 5c1b4712bd, d4193bcbd3, a7c02b7cd5, 644a7b68b0, 51b838a2b4, fcece8d5c1, 35c515556e,
c3585309ff, 1f28300d05, 9c41ba62ac, cddd3a1d5e, a1f0312a10, c760395149, 1d44799b3c, da476b2b86, c8f7a2e453, 373238429e,
c35ace3544, 09c98b5bde, 85f6551b32, 4936268afc, d687bb7db4, 1976320fbe, 70a60b102b, 8b990f7732, ed66b30b7d, 17b72d4242,
6264e21030, 4d453be279, 22502e7467, 4111d6a2c7, 6129b088f1, 66b3708cce, 6ea7ca6aa2, ed5c874765, 1d113174e4, f402a44e18,
67dcb48a7f, 25e98de6e2, 0d9c8d70c8, 75230d72ec, 64cd7192ad, 974bbf7e4f, 256b049e7d, 3c249b2b5a, 82599f4afd, 3460a65b79,
a8d0de4e98, acedff8a7a, 697bf9d085, ed88d32c32, 0a1633d623, 4e2158ab64, 517fe6c99e, 7217517a11, 3bd2578c9c, ceabceb184,
70dda4032a, bace9735df, 2c975c768e, 4f02fd1e9e, ed39bbf1d4, 63b89c816b, 59c10cdbea, 7fc744c273, bb13cb1911, ce4f03f049,
15b1e6c767, 4581c29d9d, ddee4d2e2a, cf0fcd3893, d42eb0333c, 47b9176d6f, ffe99b319c, 258514b333, effee41321, be9ef3bd18,
235364b968, 98c82aa126, 8cd0a0ba07, 04a63e3f84, fa9c4d3398, 52a208024d, 3410aec605, 58111d2323, d3d1a40909, dedeb4cb2c,
92feaa3641, 58314ff2f3, 99b6238fc7, e365b7b14f, 5b6e9ab7e0, f1e1d37d8e, 07cc0f0939, eab0f60734, f3f7992c0a, 24a220bc51,
83ebd01526, c5555d7343, bd97e15e9a, 3431c97712, 2d58cf2da4, c84aa417e1, 6f0deb57da, 3597523a04, 5de6d5de98, 3d02e7bbe3,
1098f877e6, 8e0b30ece4, b8f97ad15b, b7028ff131, d42c4a46f9, 1bb8f3a7b3, 06ff696141, f8c9275127, a06f035b5a, 0d3a2015b2,
0139e0f1d5, cde2c53476, af8996b54a, edf6b2060e, e998543f3a, 2bb3300a95, 7e9d6b4761, bf79a4c893, dcf6fdb1c9, a4f6a93c59,
c4ad390b3f, d378b00bf7, bb6e2cbbdc, 6b30e261a5, f29e729acb, 56b6c6c7e6, 0a5d4413ca, ddb47631b6, 04beacde61, 9fec22319b,
dca5e54811, 4e331b70a4, 5cccaaa782, 58d1f7c145, cbb930379d, b9e6bd6e72, 6f3058f9f9, c71db6c441, 3200107d71, 526dfd7e21,
e92ba42d82, b1b21937ed, 55d435be87, 4972cfad94, 5c24594d5b, 27e20c4ef1, 3edea4acb4, 5304dbf8c1, 0c64003b09, f38b3a830d,
500fb44856, 2a483b62f0, c9b29c5c3d, 889fde91a9, eb7f3fb7a0, d58031caf6, 83b6b6a3dd, db5fc57e43, b4fd3def10, d477862edf,
821db5c0d5, 4a9a082896, b109dd559a, de04e23c51, c6b0649613, d2d886b322, a009ff928c, a13f427816, 8a69316b82, 1576208e25,
3ac5c30c5f, 39d3d8db4c, 812983f97c, 1df4a92901, d9fe534223, 95d4df7a1b, b3cb8df65b, 18f8ee9d15, 2eb4ab4b84, a3fdad3de1,
5ee2b125c8, 360d6ca382, b477f9637d, 626c453a80, 859100213d, ec6076c0b3, 878c10f723, 2c0a75b11f, 718b3f3223, 9f6dc788c4,
9b9248f1f6, f591e886d7, cb0ea165f9, db404c0648, e4d75467ef, 69b8e2e774, 160370f45e, 53f3623481, 324299170e, f5f47103c3,
70390920cd, 447a2490ef, ad3f72a10b, 70d2bbd9b9, bbdde7be81, 22d71ceb14, 53797500bf, 7217959992, effdfaef2c, 0c3bac0fe0,
1f55f8241c, 8b75dfa149, 94642d2871, 33f571acc4, b886e9b242, f963ff394d, a26411f2a6, ee71625499, 7d6f838d25, 089539ef7e,
373d71e4f3, cde427ae4c, 7cfef90cc8, 04db655776, b91f5df89f, 7fd741348c, 2aaafa5b22, 11f9b865cf, 60a691bbaf, 3f1f6c03f2,
dcd19e3c83, ced855568f, c8483df795, d544da0665, 408c3122d3, c196736c3c, d47542830f, 055ff8f64a, 2bf92f40f7, f0f78341e7,
dbb805189f, e32dbe2ee2, 3272f5b9fd, ea4e2ab198, 990e1fa1c4, 5ca97ceedd, d3b8c35058, 1b851a8b72, 553e3f6045, 3f73bae22f,
9160b42113, 36e1a2fca2, cbbd480cca, 77ae0cac0e, 5771323800, aab8691cf5, d9ff3f4691, 09c2ce0976, 0141a42911, b55e44d450,
f70cce7e23, 8b3be469b6, dc975bc19a, c1d51b98a2, 5a51bfb984, b55b9e5515, af7b46516f, 05e0306823, 77a8371543, e4406ad26b,
a8971d842b, c003057cf0, 586472c7dd, ecb192f16e, 6dc585417f, 264444c92a, b2fcd7094b, f3b98dad8a, c27213c65d, f61c5e99b5,
6412f14b08, 0a620c6ba3, 685741f039, 4ee78e1a5c, 77735d9d72, e3065e0530, d9d4221ad3, 0bbde3e75a, 7dec8bb943, c6a5ed6c3b,
99d62fa549, c0bb05bc0f, 70258e5c1d, 6ec146b24b, 9f464e3b41, e95bdab478, 421b3d1dc5, 437a25fb97, cb55a17227, 9d160abda5,
3758cf6565, 9db5e2a8f7, e0dba41065, 8fbaf25799, 65072f607f, cb4bf932c4, 20b938365c, 8e68ebd52b, 293382ea85, ac35bf5179,
eb610d1c21, ac0ccb2169, 20a57d7ca1, 856e716507, bb17f672fe, d087736df0, ce546bcfa3, 9f681aca45, 0500f30d14, ec2c42c688,
7f0bc100f5, 522a985ef3, 634d0e8a0f, 28b4fc7598, 0d4c256ca8, 35f63a8fb1, a4e96822ed, 96f984a76a, 5f15dfafe7, b9cca6b388,
712f07988e, 08bddb3165, 3efb879cdf, ccf44fda70, ce525a4ccf, c6299c4592, a371c79151, f59a8c24f4, 8be6f63711, 8ba35b1324,
28dea3b225, 666643a6ae, 4cf688f15c, 1e57c90449, c0678bab15, 187a69a61a, 2e96a072e9, 5a410e63b3, 300a857a5c, eba22e0efa,
664f6b9014, 5bbd7cff10, 6f172d4f01, 829e1c8bde, e7d309dadf, 4bac12308a, 2aba1d9a52, 1f4e32e4f2, 49f98967d6, 356fa02094,
a8e47e59ad, c760d29fb4, d77a518cf0, e04c2aa251, e6eb41536d, 4b3ad60565, 90669b0604, f4bf1b8960, 70423c6d8e, ae313c17d4,
0e45fa53ad, fe20f83130, d04e6686ca, 6adb45e15f, a1337faeee, 7e065ab02f, 1e2dde09b4, 3cdfe76134, 39855651d5, dd469d2349,
3d448320bf, 7b0771a581, 988b396223, fa3b585785, 91e0ebeb56, 8e68eb9a20, 13bd3d9121, 048ae0913c, 73fff8b817, 605fa96a34,
2cb33d7894, 15d427418d, ed3e553726, 7eace68de6, 8009882cef, 7365d8639c, 4c967d4bc1, 3fdf799bd7, 9aca716e6b, cf9ecdc9a4
686 changed files with 35718 additions and 14216 deletions
```diff
@@ -47,6 +47,7 @@ runs:
           "JWT_SECRET:.global.jwtSecret"
+          "JWT_SPOT_REFRESH_SECRET:.chalice.env.JWT_SPOT_REFRESH_SECRET"
           "JWT_SPOT_SECRET:.global.jwtSpotSecret"
           "JWT_SECRET:.global.tokenSecret"
           "LICENSE_KEY:.global.enterpriseEditionLicense"
           "MINIO_ACCESS_KEY:.global.s3.accessKey"
           "MINIO_SECRET_KEY:.global.s3.secretKey"
```
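Each quoted pair above maps a secret name to a yq path in the Helm values file. A minimal sketch of how one such mapping could be applied (yq v4 syntax; the vars.yaml target and secret value are illustrative, not taken from the action itself):

```bash
# Hypothetical application of the "JWT_SPOT_SECRET:.global.jwtSpotSecret" mapping:
export JWT_SPOT_SECRET='example-secret-value'   # normally injected from repository secrets
yq eval '.global.jwtSpotSecret = strenv(JWT_SPOT_SECRET)' -i vars.yaml
```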
2  .github/workflows/alerts-ee.yaml  vendored

```diff
@@ -130,7 +130,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,alerts,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,alerts,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
```
2  .github/workflows/alerts.yaml  vendored

```diff
@@ -130,7 +130,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,alerts,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,alerts,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
2  .github/workflows/api-ee.yaml  vendored

```diff
@@ -127,7 +127,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,chalice,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
```
2  .github/workflows/api.yaml  vendored

```diff
@@ -120,7 +120,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,chalice,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,chalice,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
2  .github/workflows/assist-ee.yaml  vendored

```diff
@@ -113,7 +113,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,assist,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,assist,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
```
122  .github/workflows/assist-server-ee.yaml  vendored  (new file)

```yaml
# This action will push the assist changes to aws
on:
  workflow_dispatch:
    inputs:
      skip_security_checks:
        description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
        required: false
        default: "false"
  push:
    branches:
      - dev
    paths:
      - "ee/assist-server/**"

name: Build and Deploy Assist-Server EE

jobs:
  deploy:
    name: Deploy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          # We need to diff with old commit
          # to see which workers got changed.
          fetch-depth: 2

      - uses: ./.github/composite-actions/update-keys
        with:
          assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
          assist_key: ${{ secrets.ASSIST_KEY }}
          domain_name: ${{ secrets.EE_DOMAIN_NAME }}
          jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
          jwt_secret: ${{ secrets.EE_JWT_SECRET }}
          jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
          jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
          license_key: ${{ secrets.EE_LICENSE_KEY }}
          minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
          minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
          pg_password: ${{ secrets.EE_PG_PASSWORD }}
          registry_url: ${{ secrets.OSS_REGISTRY_URL }}
        name: Update Keys

      - name: Docker login
        run: |
          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"

      - uses: azure/k8s-set-context@v1
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
        id: setcontext

      - name: Building and Pushing Assist-Server image
        id: build-image
        env:
          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
          ENVIRONMENT: staging
        run: |
          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
          cd assist-server
          PUSH_IMAGE=0 bash -x ./build.sh ee
          [[ "x$skip_security_checks" == "xtrue" ]] || {
            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
            images=("assist-server")
            for image in ${images[*]};do
              ./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
            done
            err_code=$?
            [[ $err_code -ne 0 ]] && {
              exit $err_code
            }
          } && {
            echo "Skipping Security Checks"
          }
          images=("assist-server")
          for image in ${images[*]};do
            docker push $DOCKER_REPO/$image:$IMAGE_TAG
          done
      - name: Creating old image input
        run: |
          #
          # Create yaml with existing image tags
          #
          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt

          echo > /tmp/image_override.yaml

          for line in `cat /tmp/image_tag.txt`;
          do
            image_array=($(echo "$line" | tr ':' '\n'))
            cat <<EOF >> /tmp/image_override.yaml
          ${image_array[0]}:
            image:
              # We've to strip off the -ee, as helm will append it.
              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
          EOF
          done
      - name: Deploy to kubernetes
        run: |
          pwd
          cd scripts/helmcharts/

          # Update changed image tag
          sed -i "/assist-server/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml

          cat /tmp/image_override.yaml
          # Deploy command
          mkdir -p /tmp/charts
          mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector,assist-api} /tmp/charts/
          rm -rf openreplay/charts/*
          mv /tmp/charts/* openreplay/charts/
          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
        env:
          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
          # We're not passing -ee flag, because helm will add that.
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
          ENVIRONMENT: staging
```
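Worked through by hand, the "Creating old image input" loop above turns each running image into a Helm override entry; the image name below is illustrative:

```bash
# One iteration of the loop, traced with a hypothetical image name:
line='chalice:v1.21.0-ee'                     # produced by: kubectl get pods ... | cut -d '/' -f3
image_array=($(echo "$line" | tr ':' '\n'))   # -> image_array=(chalice v1.21.0-ee)
echo "${image_array[1]}" | cut -d '-' -f 1    # -> v1.21.0 (the -ee suffix is stripped; helm re-appends it)
# Appended to /tmp/image_override.yaml:
# chalice:
#   image:
#     tag: v1.21.0
```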
2  .github/workflows/assist-stats.yaml  vendored

```diff
@@ -130,7 +130,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,assist-stats,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,assist-stats,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -f -
```
2  .github/workflows/assist.yaml  vendored

```diff
@@ -112,7 +112,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,assist,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,assist,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
```
2  .github/workflows/crons-ee.yaml  vendored

```diff
@@ -129,7 +129,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,utilities,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,utilities,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
```
2  .github/workflows/frontend-dev.yaml  vendored

```diff
@@ -76,7 +76,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
33  .github/workflows/frontend-tests.yaml  vendored  (new file)

```yaml
name: Frontend tests

on:
  pull_request:
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-test.yaml'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install dependencies
        working-directory: frontend
        run: yarn

      - name: Run tests
        working-directory: frontend
        run: yarn test:ci

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          directory: frontend/coverage/
```
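The same checks can be reproduced locally with the commands the workflow runs (assuming a Node 20 toolchain with yarn available):

```bash
cd frontend
yarn           # install dependencies, as in the "Install dependencies" step
yarn test:ci   # run the test suite; coverage lands in frontend/coverage/ for the Codecov upload
```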
4  .github/workflows/frontend.yaml  vendored

```diff
@@ -89,7 +89,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
@@ -138,7 +138,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,frontend,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
189  .github/workflows/patch-build-old.yaml  vendored  (new file)

```yaml
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions

on:
  workflow_dispatch:
    inputs:
      services:
        description: 'Comma separated names of services to build(in small letters).'
        required: true
        default: 'chalice,frontend'
      tag:
        description: 'Tag to update.'
        required: true
        type: string
      branch:
        description: 'Branch to build patches from. Make sure the branch is uptodate with tag. Else itll cause missing commits.'
        required: true
        type: string

name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag

jobs:
  deploy:
    name: Build Patch from old tag
    runs-on: ubuntu-latest
    env:
      DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
      DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          fetch-depth: 4
          ref: ${{ github.event.inputs.tag }}

      - name: Set Remote with GITHUB_TOKEN
        run: |
          git config --unset http.https://github.com/.extraheader
          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git

      - name: Create backup tag with timestamp
        run: |
          set -e # Exit immediately if a command exits with a non-zero status
          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
          echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
          echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
          git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
          git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
          echo "Created backup tag: $BACKUP_TAG"

          # Get the oldest commit date from the last 3 commits in raw format
          OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
          echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
          # Add 1 second to the timestamp
          NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
          echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV

      - name: Setup yq
        uses: mikefarah/yq@master

      # Configure AWS credentials for the first registry
      - name: Configure AWS credentials for RELEASE_ARM_REGISTRY
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
          aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
          aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}

      - name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
        id: login-ecr-arm
        run: |
          aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
          aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}

      - uses: depot/setup-action@v1
      - name: Get HEAD Commit ID
        run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
      - name: Define Branch Name
        run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV

      - name: Build
        id: build-image
        env:
          DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
          DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
          MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
          MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
          MSAAS_REPO_FOLDER: /tmp/msaas
        run: |
          set -exo pipefail
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git checkout -b $BRANCH_NAME
          working_dir=$(pwd)
          function image_version(){
            local service=$1
            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
            current_version=$(yq eval '.AppVersion' $chart_path)
            new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
            echo $new_version
            # yq eval ".AppVersion = \"$new_version\"" -i $chart_path
          }
          function clone_msaas() {
            [ -d $MSAAS_REPO_FOLDER ] || {
              git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
              cd $MSAAS_REPO_FOLDER
              cd openreplay && git fetch origin && git checkout $INPUT_TAG
              git log -1
              cd $MSAAS_REPO_FOLDER
              bash git-init.sh
              git checkout
            }
          }
          function build_managed() {
            local service=$1
            local version=$2
            echo building managed
            clone_msaas
            if [[ $service == 'chalice' ]]; then
              cd $MSAAS_REPO_FOLDER/openreplay/api
            else
              cd $MSAAS_REPO_FOLDER/openreplay/$service
            fi
            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
          }
          # Checking for backend images
          ls backend/cmd >> /tmp/backend.txt
          echo Services: "${{ github.event.inputs.services }}"
          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
          BUILD_SCRIPT_NAME="build.sh"
          # Build FOSS
          for SERVICE in "${SERVICES[@]}"; do
            # Check if service is backend
            if grep -q $SERVICE /tmp/backend.txt; then
              cd backend
              foss_build_args="nil $SERVICE"
              ee_build_args="ee $SERVICE"
            else
              [[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
              [[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
              ee_build_args="ee"
            fi
            version=$(image_version $SERVICE)
            echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
            echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
            IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
            if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
              IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
              echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
            else
              build_managed $SERVICE $version
            fi
            cd $working_dir
            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
            yq eval ".AppVersion = \"$version\"" -i $chart_path
            git add $chart_path
            git commit -m "Increment $SERVICE chart version"
          done

      - name: Change commit timestamp
        run: |
          # Convert the timestamp to a date format git can understand
          NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
          echo "Setting commit date to: $NEW_DATE"

          # Amend the commit with the new date
          GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"

          # Verify the change
          git log -1 --pretty=format:"Commit now dated: %cD"

          # git tag and push
          git tag $INPUT_TAG -f
          git push origin $INPUT_TAG -f

      # - name: Debug Job
      #   if: ${{ failure() }}
      #   uses: mxschmitt/action-tmate@v3
      #   env:
      #     DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
      #     DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
      #     MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
      #     MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
      #     MSAAS_REPO_FOLDER: /tmp/msaas
      #   with:
      #     limit-access-to-actor: true
```
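The backup-tag step feeds the "Change commit timestamp" step: it takes the oldest of the last three commit timestamps, adds one second, and backdates the amended commit. A quick trace with an illustrative epoch value:

```bash
OLDEST_COMMIT_TIMESTAMP=1735689600                    # from: git log -3 --pretty=format:"%at" | tail -1
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))        # 1735689601
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
echo "$NEW_DATE"                                      # Wed Jan  1 00:00:01 2025 +0000
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"   # backdates author and committer dates
```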
246  .github/workflows/patch-build.yaml  vendored

```diff
@@ -2,7 +2,6 @@

 on:
   workflow_dispatch:
-    description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
     inputs:
       services:
         description: 'Comma separated names of services to build(in small letters).'
@@ -20,12 +19,20 @@ jobs:
       DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
     steps:
     - name: Checkout
-      uses: actions/checkout@v2
+      uses: actions/checkout@v4
       with:
-        fetch-depth: 1
+        fetch-depth: 0
+        token: ${{ secrets.GITHUB_TOKEN }}
     - name: Rebase with main branch, to make sure the code has latest main changes
+      if: github.ref != 'refs/heads/main'
       run: |
-        git pull --rebase origin main
+        git remote -v
+        git config --global user.email "action@github.com"
+        git config --global user.name "GitHub Action"
+        git config --global rebase.autoStash true
+        git fetch origin main:main
+        git rebase main
+        git log -3

     - name: Downloading yq
       run: |
@@ -48,6 +55,8 @@ jobs:
           aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}

     - uses: depot/setup-action@v1
+      env:
+        DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
     - name: Get HEAD Commit ID
       run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
     - name: Define Branch Name
@@ -65,78 +74,168 @@ jobs:
         MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
         MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
         MSAAS_REPO_FOLDER: /tmp/msaas
+        SERVICES_INPUT: ${{ github.event.inputs.services }}
       run: |
-        set -exo pipefail
-        git config --local user.email "action@github.com"
-        git config --local user.name "GitHub Action"
-        git checkout -b $BRANCH_NAME
-        working_dir=$(pwd)
-        function image_version(){
-          local service=$1
-          chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
-          current_version=$(yq eval '.AppVersion' $chart_path)
-          new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
-          echo $new_version
-          # yq eval ".AppVersion = \"$new_version\"" -i $chart_path
-        }
+        #!/bin/bash
+        set -euo pipefail
+
+        # Configuration
+        readonly WORKING_DIR=$(pwd)
+        readonly BUILD_SCRIPT_NAME="build.sh"
+        readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
+
+        # Initialize git configuration
+        setup_git() {
+            git config --local user.email "action@github.com"
+            git config --local user.name "GitHub Action"
+            git checkout -b "$BRANCH_NAME"
+        }
-        function clone_msaas() {
-          [ -d $MSAAS_REPO_FOLDER ] || {
-            git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
-            cd $MSAAS_REPO_FOLDER
-            cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
-            git log -1
-            cd $MSAAS_REPO_FOLDER
-            bash git-init.sh
-            git checkout
-          }
-        }
+
+        # Get and increment image version
+        image_version() {
+            local service=$1
+            local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
+            local current_version new_version
+
+            current_version=$(yq eval '.AppVersion' "$chart_path")
+            new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
+            echo "$new_version"
+        }
-        function build_managed() {
-          local service=$1
-          local version=$2
-          echo building managed
-          clone_msaas
-          if [[ $service == 'chalice' ]]; then
-            cd $MSAAS_REPO_FOLDER/openreplay/api
-          else
-            cd $MSAAS_REPO_FOLDER/openreplay/$service
-          fi
-          IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
-        }
+
+        # Clone MSAAS repository if not exists
+        clone_msaas() {
+            if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
+                git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
+                cd "$MSAAS_REPO_FOLDER"
+                cd openreplay && git fetch origin && git checkout main
+                git log -1
+                cd "$MSAAS_REPO_FOLDER"
+                bash git-init.sh
+                git checkout
+            fi
+        }
-        # Checking for backend images
-        ls backend/cmd >> /tmp/backend.txt
-        echo Services: "${{ github.event.inputs.services }}"
-        IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
-        BUILD_SCRIPT_NAME="build.sh"
-        # Build FOSS
-        for SERVICE in "${SERVICES[@]}"; do
-          # Check if service is backend
-          if grep -q $SERVICE /tmp/backend.txt; then
-            cd backend
-            foss_build_args="nil $SERVICE"
-            ee_build_args="ee $SERVICE"
-          else
-            [[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
-            [[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
-            ee_build_args="ee"
-          fi
-          version=$(image_version $SERVICE)
-          echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-          IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-          echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
-          IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
-          if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
-            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-            echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-          else
-            build_managed $SERVICE $version
-          fi
-          cd $working_dir
-          chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
-          yq eval ".AppVersion = \"$version\"" -i $chart_path
-          git add $chart_path
-          git commit -m "Increment $SERVICE chart version"
-          git push --set-upstream origin $BRANCH_NAME
-        done
+
+        # Build managed services
+        build_managed() {
+            local service=$1
+            local version=$2
+
+            echo "Building managed service: $service"
+            clone_msaas
+
+            if [[ $service == 'chalice' ]]; then
+                cd "$MSAAS_REPO_FOLDER/openreplay/api"
+            else
+                cd "$MSAAS_REPO_FOLDER/openreplay/$service"
+            fi
+
+            local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
+
+            echo "Executing: $build_cmd"
+            if ! eval "$build_cmd" 2>&1; then
+                echo "Build failed for $service"
+                exit 1
+            fi
+        }
+
+        # Build service with given arguments
+        build_service() {
+            local service=$1
+            local version=$2
+            local build_args=$3
+            local build_script=${4:-$BUILD_SCRIPT_NAME}
+
+            local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
+            echo "Executing: $command"
+            eval "$command"
+        }
+
+        # Update chart version and commit changes
+        update_chart_version() {
+            local service=$1
+            local version=$2
+            local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
+
+            # Ensure we're in the original working directory/repository
+            cd "$WORKING_DIR"
+            yq eval ".AppVersion = \"$version\"" -i "$chart_path"
+            git add "$chart_path"
+            git commit -m "Increment $service chart version to $version"
+            git push --set-upstream origin "$BRANCH_NAME"
+            cd -
+        }
+
+        # Main execution
+        main() {
+            setup_git
+
+            # Get backend services list
+            ls backend/cmd >"$BACKEND_SERVICES_FILE"
+
+            # Parse services input (fix for GitHub Actions syntax)
+            echo "Services: ${SERVICES_INPUT:-$1}"
+            IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
+
+            # Process each service
+            for service in "${services[@]}"; do
+                echo "Processing service: $service"
+                cd "$WORKING_DIR"
+
+                local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
+
+                # Determine build configuration based on service type
+                if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
+                    # Backend service
+                    cd backend
+                    foss_build_args="nil $service"
+                    ee_build_args="ee $service"
+                else
+                    # Non-backend service
+                    case "$service" in
+                    chalice | alerts | crons)
+                        cd "$WORKING_DIR/api"
+                        ;;
+                    *)
+                        cd "$service"
+                        ;;
+                    esac
+
+                    # Special build scripts for alerts/crons
+                    if [[ $service == 'alerts' || $service == 'crons' ]]; then
+                        build_script="build_${service}.sh"
+                    fi
+
+                    ee_build_args="ee"
+                fi
+
+                # Get version and build
+                local version
+                version=$(image_version "$service")
+
+                # Build FOSS and EE versions
+                build_service "$service" "$version" "$foss_build_args"
+                build_service "$service" "${version}-ee" "$ee_build_args"
+
+                # Build managed version for specific services
+                if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
+                    echo "Nothing to build in managed for service $service"
+                else
+                    build_managed "$service" "$version"
+                fi
+
+                # Update chart and commit
+                update_chart_version "$service" "$version"
+            done
+            cd "$WORKING_DIR"
+
+            # Cleanup
+            rm -f "$BACKEND_SERVICES_FILE"
+        }
+
+        echo "Working directory: $WORKING_DIR"
+        # Run main function with all arguments
+        main "$SERVICES_INPUT"

     - name: Create Pull Request
       uses: repo-sync/pull-request@v2
@@ -147,8 +246,7 @@ jobs:
       pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
       pr_body: |
         This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
-        Once this PR is merged, To update the latest tag, run the following workflow.
-        https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml
+        Once this PR is merged, tag update job will run automatically.

     # - name: Debug Job
     #   if: ${{ failure() }}
```
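The `image_version` helper (in both its old and refactored forms) bumps the last component of the chart's AppVersion with awk; a worked example, version string illustrative:

```bash
current_version="1.21.0"   # as read by: yq eval '.AppVersion' "$chart_path"
echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}'
# -> 1.21.1
```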
2  .github/workflows/sourcemaps-reader-ee.yaml  vendored

```diff
@@ -119,7 +119,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
2  .github/workflows/sourcemaps-reader.yaml  vendored

```diff
@@ -118,7 +118,7 @@ jobs:
           cat /tmp/image_override.yaml
           # Deploy command
           mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit,connector} /tmp/charts/
+          mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit,connector,assist-api} /tmp/charts/
           rm -rf openreplay/charts/*
           mv /tmp/charts/* openreplay/charts/
           helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
```
18  .github/workflows/tracker-tests.yaml  vendored

```diff
@@ -22,22 +22,14 @@ jobs:
     - name: Cache tracker modules
       uses: actions/cache@v3
       with:
-        path: tracker/tracker/node_modules
-        key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
-        restore-keys: |
-          test_tracker_build{{ runner.OS }}-build-
-          test_tracker_build{{ runner.OS }}-
-    - name: Cache tracker-assist modules
-      uses: actions/cache@v3
-      with:
-        path: tracker/tracker-assist/node_modules
-        key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
+        path: tracker/node_modules
+        key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }}
         restore-keys: |
           test_tracker_build{{ runner.OS }}-build-
           test_tracker_build{{ runner.OS }}-
     - name: Setup Testing packages
       run: |
-        cd tracker/tracker
+        cd tracker
         bun install
     - name: Jest tests
       run: |
@@ -47,10 +39,6 @@ jobs:
       run: |
         cd tracker/tracker
         bun run build
-    - name: (TA) Setup Testing packages
-      run: |
-        cd tracker/tracker-assist
-        bun install
     - name: (TA) Jest tests
       run: |
         cd tracker/tracker-assist
```
44  .github/workflows/update-tag.yaml  vendored

```diff
@@ -1,35 +1,43 @@
 on:
   workflow_dispatch:
-    description: "This workflow will build for patches for latest tag, and will Always use commit from main branch."
     inputs:
       services:
         description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false"
         required: true
         default: "false"
+  pull_request:
+    types: [closed]
+    branches:
+      - main

-name: Force Push tag with main branch HEAD
+name: Release tag update --force

 jobs:
   deploy:
     name: Build Patch from main
     runs-on: ubuntu-latest
     env:
       DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
       DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
+    if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
     steps:
     - name: Checkout
       uses: actions/checkout@v2

+    - name: Get latest release tag using GitHub API
+      id: get-latest-tag
+      run: |
+        LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
+          "https://api.github.com/repos/${{ github.repository }}/releases/latest" \
+          | jq -r .tag_name)
+
+        # Fallback to git command if API doesn't return a tag
+        if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
+          echo "Not found latest tag"
+          exit 100
+        fi
+
+        echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
+        echo "Latest tag: $LATEST_TAG"
+
     - name: Set Remote with GITHUB_TOKEN
       run: |
         git config --unset http.https://github.com/.extraheader
         git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git

     - name: Push main branch to tag
       run: |
         git fetch --tags
         git checkout main
-        git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force
-    # - name: Debug Job
-    #   if: ${{ failure() }}
-    #   uses: mxschmitt/action-tmate@v3
-    #   with:
-    #     limit-access-to-actor: true
+        echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
+        git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
```
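The tag-lookup step relies on the public GitHub releases API and can be exercised standalone (the printed value is illustrative):

```bash
LATEST_TAG=$(curl -s "https://api.github.com/repos/openreplay/openreplay/releases/latest" | jq -r .tag_name)
echo "$LATEST_TAG"   # e.g. v1.22.0; the workflow exits with code 100 when this is "null" or empty
```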
4  .github/workflows/workers-ee.yaml  vendored

```diff
@@ -148,9 +148,7 @@ jobs:
           set -x
           echo > /tmp/image_override.yaml
           mkdir /tmp/helmcharts
-          mv openreplay/charts/ingress-nginx /tmp/helmcharts/
-          mv openreplay/charts/quickwit /tmp/helmcharts/
-          mv openreplay/charts/connector /tmp/helmcharts/
+          mv openreplay/charts/{ingress-nginx,quickwit,connector,assist-api} /tmp/helmcharts/
           ## Update images
           for image in $(cat /tmp/images_to_build.txt);
           do
```
4  .github/workflows/workers.yaml  vendored

```diff
@@ -141,9 +141,7 @@ jobs:
           set -x
           echo > /tmp/image_override.yaml
           mkdir /tmp/helmcharts
-          mv openreplay/charts/ingress-nginx /tmp/helmcharts/
-          mv openreplay/charts/quickwit /tmp/helmcharts/
-          mv openreplay/charts/connector /tmp/helmcharts/
+          mv openreplay/charts/{ingress-nginx,quickwit,connector,assist-api} /tmp/helmcharts/
           ## Update images
           for image in $(cat /tmp/images_to_build.txt);
           do
```
2  .gitignore  vendored

```diff
@@ -7,3 +7,5 @@ node_modules
 **/*.envrc
 .idea
 *.mob*
+install-state.gz
+frontend/tests/playwright/auth-state.json
```
```diff
@@ -1,7 +1,7 @@
 repos:
   - repo: https://github.com/gitguardian/ggshield
-    rev: v1.14.5
+    rev: v1.38.0
     hooks:
       - id: ggshield
         language_version: python3
-        stages: [commit]
+        stages: [pre-commit]
```
22  api/Pipfile

```diff
@@ -4,26 +4,24 @@ verify_ssl = true
 name = "pypi"

 [packages]
-urllib3 = "==2.3.0"
+urllib3 = "==2.4.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.38.16"
 pyjwt = "==2.10.1"
-psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
-clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+psycopg = {extras = ["binary", "pool"], version = "==3.2.9"}
+clickhouse-connect = "==0.8.17"
+elasticsearch = "==9.0.1"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
-fastapi = "==0.115.8"
-uvicorn = {extras = ["standard"], version = "==0.34.0"}
+cachetools = "==5.5.2"
+fastapi = "==0.115.12"
+uvicorn = {extras = ["standard"], version = "==0.34.2"}
 python-decouple = "==3.8"
-pydantic = {extras = ["email"], version = "==2.10.6"}
+pydantic = {extras = ["email"], version = "==2.11.4"}
 apscheduler = "==3.11.0"
-redis = "==5.2.1"
+redis = "==6.1.0"

 [dev-packages]

 [requires]
-python_version = "3.12"
+python_full_version = "3.12.8"
```
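Since the [requires] section now pins python_full_version, a matching interpreter is needed when re-locking; a minimal sketch, assuming pipenv is installed:

```bash
cd api
pipenv --python 3.12.8   # create the virtualenv against the pinned interpreter
pipenv install           # resolve the updated pins into Pipfile.lock
```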
```diff
@@ -16,7 +16,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, core_dynamic_crons
 from routers import core, core_dynamic
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

 loglevel = config("LOGLEVEL", default=logging.WARNING)
 print(f">Loglevel set to: {loglevel}")
@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)

-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/pa")
+app.include_router(product_analytics.app, prefix="/pa")
+app.include_router(product_analytics.app_apikey, prefix="/pa")
```
```diff
@@ -0,0 +1,11 @@
+import logging
+
+from decouple import config
+
+logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+
+if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
+    logging.info(">>> Using experimental autocomplete")
+    from . import autocomplete_ch as autocomplete
+else:
+    from . import autocomplete
```
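python-decouple reads EXP_AUTOCOMPLETE from the environment (or a settings file), so the import switch flips per deployment without code changes; a sketch, with the server command assumed rather than taken from the repo:

```bash
# Default: the PostgreSQL-backed autocomplete module is imported.
uvicorn app:app
# Experimental: the autocomplete_ch variant is imported instead,
# and ">>> Using experimental autocomplete" is logged at startup.
EXP_AUTOCOMPLETE=true uvicorn app:app
```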
|
@ -1,10 +1,9 @@
|
|||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import countries, events, metadata
|
||||
from chalicelib.core import countries, metadata
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import pg_client
|
||||
from chalicelib.utils.event_filter_definition import Event
|
||||
from chalicelib.utils.or_cache import CachedResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
TABLE = "public.autocomplete"
|
||||
|
|
@ -85,7 +84,8 @@ def __generic_query(typename, value_length=None):
|
|||
ORDER BY value"""
|
||||
|
||||
if value_length is None or value_length > 2:
|
||||
return f"""(SELECT DISTINCT value, type
|
||||
return f"""SELECT DISTINCT ON(value,type) value, type
|
||||
((SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
|
|
@ -101,7 +101,7 @@ def __generic_query(typename, value_length=None):
|
|||
AND type='{typename.upper()}'
|
||||
AND value ILIKE %(value)s
|
||||
ORDER BY value
|
||||
LIMIT 5);"""
|
||||
LIMIT 5)) AS raw;"""
|
||||
return f"""SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
|
|
@ -112,10 +112,10 @@ def __generic_query(typename, value_length=None):
|
|||
LIMIT 10;"""
|
||||
|
||||
|
||||
def __generic_autocomplete(event: Event):
|
||||
def __generic_autocomplete(event: str):
|
||||
def f(project_id, value, key=None, source=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = __generic_query(event.ui_type, value_length=len(value))
|
||||
query = __generic_query(event, value_length=len(value))
|
||||
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value)}
|
||||
cur.execute(cur.mogrify(query, params))
|
||||
|
|
@ -148,8 +148,8 @@ def __errors_query(source=None, value_length=None):
|
|||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
|
|
@ -160,8 +160,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
|
|
@ -172,8 +172,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(value)s
|
||||
|
|
@ -184,8 +184,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(value)s
|
||||
|
|
@ -195,8 +195,8 @@ def __errors_query(source=None, value_length=None):
|
|||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
|
|
@ -207,8 +207,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
|
|
@@ -233,8 +233,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 if len(value) > 2:
 query = f"""(SELECT DISTINCT ON(lg.reason)
 lg.reason AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -243,8 +243,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 UNION ALL
 (SELECT DISTINCT ON(lg.name)
 lg.name AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -253,8 +253,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 UNION ALL
 (SELECT DISTINCT ON(lg.reason)
 lg.reason AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -263,8 +263,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 UNION ALL
 (SELECT DISTINCT ON(lg.name)
 lg.name AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -273,8 +273,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 else:
 query = f"""(SELECT DISTINCT ON(lg.reason)
 lg.reason AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -283,8 +283,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
 UNION ALL
 (SELECT DISTINCT ON(lg.name)
 lg.name AS value,
-'{events.EventType.CRASH_MOBILE.ui_type}' AS type
-FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
+'{schemas.EventType.ERROR_MOBILE}' AS type
+FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
 WHERE
 s.project_id = %(project_id)s
 AND lg.project_id = %(project_id)s
@@ -326,7 +326,7 @@ def __search_metadata(project_id, value, key=None, source=None):
 AND {colname} ILIKE %(svalue)s LIMIT 5)""")
 with pg_client.PostgresClient() as cur:
 cur.execute(cur.mogrify(f"""\
-SELECT key, value, 'METADATA' AS TYPE
+SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
 FROM({" UNION ALL ".join(sub_from)}) AS all_metas
 LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
 "svalue": helper.string_to_sql_like("^" + value)}))
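Both autocomplete variants bind `%(value)s` for substring matches and `%(svalue)s` for prefix matches via `helper.string_to_sql_like("^" + value)`. A minimal sketch of what that helper presumably does (illustrative, not the repository's actual implementation):

```python
def string_to_sql_like(value: str) -> str:
    # Presumed behaviour: escape LIKE wildcards, then wrap; a leading "^"
    # anchors the pattern to the start of the column value.
    value = value.replace("%", "\\%").replace("_", "\\_")
    if value.startswith("^"):
        return value[1:] + "%"
    return "%" + value + "%"

print(string_to_sql_like("^err"))  # err%   (prefix match for %(svalue)s)
print(string_to_sql_like("err"))   # %err%  (substring match for %(value)s)
```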
@@ -376,7 +376,6 @@ def is_top_supported(event_type):
 return TYPE_TO_COLUMN.get(event_type, False)

-
 @CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)
 def get_top_values(project_id, event_type, event_key=None):
 with pg_client.PostgresClient() as cur:
 if schemas.FilterType.has_value(event_type):
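`get_top_values` is wrapped in `@CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)`, so results are served from a table-backed cache for five minutes. A rough in-memory stand-in showing only the TTL mechanics (the real decorator persists to the `or_cache` table; this is an assumption-laden sketch):

```python
import time
from functools import wraps

def cached_response(ttl: int):
    """In-memory stand-in for or_cache.CachedResponse (illustrative only)."""
    store = {}

    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = store.get(key)
            if hit is not None and time.time() - hit[0] < ttl:
                return hit[1]  # still fresh: serve the cached value
            result = fn(*args, **kwargs)
            store[key] = (time.time(), result)
            return result
        return wrapper
    return decorator
```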
@@ -1,10 +1,9 @@
 import logging

 import schemas
-from chalicelib.core import countries, events, metadata
+from chalicelib.core import countries, metadata
 from chalicelib.utils import ch_client
 from chalicelib.utils import helper, exp_ch_helper
 from chalicelib.utils.event_filter_definition import Event
 from chalicelib.utils.or_cache import CachedResponse

 logger = logging.getLogger(__name__)
 TABLE = "experimental.autocomplete"
@@ -86,7 +85,8 @@ def __generic_query(typename, value_length=None):
 ORDER BY value"""

 if value_length is None or value_length > 2:
-return f"""(SELECT DISTINCT value, type
+return f"""SELECT DISTINCT ON(value, type) value, type
+FROM ((SELECT DISTINCT value, type
 FROM {TABLE}
 WHERE
 project_id = %(project_id)s
@@ -102,7 +102,7 @@ def __generic_query(typename, value_length=None):
 AND type='{typename.upper()}'
 AND value ILIKE %(value)s
 ORDER BY value
-LIMIT 5);"""
+LIMIT 5)) AS raw;"""
 return f"""SELECT DISTINCT value, type
 FROM {TABLE}
 WHERE
@@ -113,7 +113,7 @@ def __generic_query(typename, value_length=None):
 LIMIT 10;"""


-def __generic_autocomplete(event: Event):
+def __generic_autocomplete(event: str):
 def f(project_id, value, key=None, source=None):
 with ch_client.ClickHouseClient() as cur:
 query = __generic_query(event.ui_type, value_length=len(value))
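`__generic_autocomplete` is a closure factory: it captures the event type and returns a ready-to-call search function, which is how `supported_types()` maps every event type to a getter. A self-contained sketch of the shape (toy body, assumed names):

```python
def make_autocomplete(typename: str):
    """Illustrative closure factory mirroring __generic_autocomplete's shape;
    the real function builds SQL with __generic_query and runs it through
    ch_client. The body here is a toy."""
    def f(project_id, value, key=None, source=None):
        return [{"type": typename, "value": value, "projectId": project_id}]
    return f

click_search = make_autocomplete("CLICK")
print(click_search(project_id=1, value="buy"))
```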
@@ -149,7 +149,7 @@ def __pg_errors_query(source=None, value_length=None):
 return f"""((SELECT DISTINCT ON(message)
 message AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -161,7 +161,7 @@ def __pg_errors_query(source=None, value_length=None):
 (SELECT DISTINCT ON(name)
 name AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -172,7 +172,7 @@ def __pg_errors_query(source=None, value_length=None):
 (SELECT DISTINCT ON(message)
 message AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -183,7 +183,7 @@ def __pg_errors_query(source=None, value_length=None):
 (SELECT DISTINCT ON(name)
 name AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -193,7 +193,7 @@ def __pg_errors_query(source=None, value_length=None):
 return f"""((SELECT DISTINCT ON(message)
 message AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -204,7 +204,7 @@ def __pg_errors_query(source=None, value_length=None):
 (SELECT DISTINCT ON(name)
 name AS value,
 source,
-'{events.EventType.ERROR.ui_type}' AS type
+'{schemas.EventType.ERROR}' AS type
 FROM {MAIN_TABLE}
 WHERE
 project_id = %(project_id)s
@@ -257,10 +257,11 @@ def __search_metadata(project_id, value, key=None, source=None):
 WHERE project_id = %(project_id)s
 AND {colname} ILIKE %(svalue)s LIMIT 5)""")
 with ch_client.ClickHouseClient() as cur:
-query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE
+query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
 FROM({" UNION ALL ".join(sub_from)}) AS all_metas
-LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
-"svalue": helper.string_to_sql_like("^" + value)})
+LIMIT 5;""",
+parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
+"svalue": helper.string_to_sql_like("^" + value)})
 results = cur.execute(query)
 return helper.list_to_camel_case(results)
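For reference, `sub_from` is a list of per-column subqueries joined with `UNION ALL` into the outer SELECT above. A hedged sketch with made-up metadata columns and keys (the real list is built from the project's metadata configuration):

```python
sub_from = [
    """(SELECT 'plan' AS key, metadata_1 AS value
        FROM sessions
        WHERE project_id = %(project_id)s
          AND metadata_1 ILIKE %(svalue)s LIMIT 5)""",
    """(SELECT 'tenant' AS key, metadata_2 AS value
        FROM sessions
        WHERE project_id = %(project_id)s
          AND metadata_2 ILIKE %(svalue)s LIMIT 5)""",
]
query = f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
            FROM({" UNION ALL ".join(sub_from)}) AS all_metas
            LIMIT 5;"""
```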
@@ -297,7 +298,6 @@ def is_top_supported(event_type):
 return TYPE_TO_COLUMN.get(event_type, False)

-
 @CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)
 def get_top_values(project_id, event_type, event_key=None):
 with ch_client.ClickHouseClient() as cur:
 if schemas.FilterType.has_value(event_type):
@@ -1,3 +1,5 @@
+import logging
+
 import schemas
 from chalicelib.core import countries, metadata
 from chalicelib.core.errors import errors_legacy
@@ -7,6 +9,8 @@ from chalicelib.utils import ch_client, exp_ch_helper
 from chalicelib.utils import helper, metrics_helper
 from chalicelib.utils.TimeUTC import TimeUTC

+logger = logging.getLogger(__name__)
+

 def _multiple_values(values, value_key="value"):
 query_values = {}
@@ -338,14 +342,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
 SELECT details.error_id as error_id,
 name, message, users, total,
 sessions, last_occurrence, first_occurrence, chart
-FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
+FROM (SELECT error_id,
 JSONExtractString(toString(`$properties`), 'name') AS name,
 JSONExtractString(toString(`$properties`), 'message') AS message,
 COUNT(DISTINCT user_id) AS users,
 COUNT(DISTINCT events.session_id) AS sessions,
 MAX(created_at) AS max_datetime,
 MIN(created_at) AS min_datetime,
-COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id'))
+COUNT(DISTINCT error_id)
 OVER() AS total
 FROM {MAIN_EVENTS_TABLE} AS events
 INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
@@ -357,7 +361,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
 GROUP BY error_id, name, message
 ORDER BY {sort} {order}
 LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
-INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
+INNER JOIN (SELECT error_id,
 toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
 toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
 FROM {MAIN_EVENTS_TABLE}
@@ -366,7 +370,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
 GROUP BY error_id) AS time_details
 ON details.error_id=time_details.error_id
 INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
-FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
+FROM (SELECT error_id,
 gs.generate_series AS timestamp,
 COUNT(DISTINCT session_id) AS count
 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
@@ -378,9 +382,9 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
 ORDER BY timestamp) AS sub_table
 GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""

-# print("------------")
-# print(ch.format(main_ch_query, params))
-# print("------------")
+logger.debug("------------")
+logger.debug(ch.format(main_ch_query, params))
+logger.debug("------------")
 query = ch.format(query=main_ch_query, parameters=params)

 rows = ch.execute(query=query)
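The chart subquery above buckets error occurrences onto a fixed grid; `generate_series(%(startDate)s, %(endDate)s, %(step_size)s)` behaves like Python's `range` over millisecond timestamps. Illustrative values only, assuming `step_size` comes from `metrics_helper`:

```python
def generate_series(start_ms: int, end_ms: int, step_ms: int) -> list[int]:
    # Pure-Python analogue of the SQL generate_series used above.
    return list(range(start_ms, end_ms, step_ms))

buckets = generate_series(1_700_000_000_000, 1_700_000_600_000, 120_000)
print(len(buckets))  # 5 bucket start-timestamps, one chart point each
```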
@@ -1,226 +0,0 @@
-from functools import cache
-from typing import Optional
-
-import schemas
-from chalicelib.core import issues
-from chalicelib.core.autocomplete import autocomplete
-from chalicelib.core.sessions import sessions_metas
-from chalicelib.utils import pg_client, helper
-from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.event_filter_definition import SupportedFilter, Event
-
-
-def get_customs_by_session_id(session_id, project_id):
-    with pg_client.PostgresClient() as cur:
-        cur.execute(cur.mogrify("""\
-                SELECT
-                    c.*,
-                    'CUSTOM' AS type
-                FROM events_common.customs AS c
-                WHERE
-                    c.session_id = %(session_id)s
-                ORDER BY c.timestamp;""",
-                                {"project_id": project_id, "session_id": session_id})
-                    )
-        rows = cur.fetchall()
-        return helper.dict_to_camel_case(rows)
-
-
-def __merge_cells(rows, start, count, replacement):
-    rows[start] = replacement
-    rows = rows[:start + 1] + rows[start + count:]
-    return rows
-
-
-def __get_grouped_clickrage(rows, session_id, project_id):
-    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
-    if len(click_rage_issues) == 0:
-        return rows
-
-    for c in click_rage_issues:
-        merge_count = c.get("payload")
-        if merge_count is not None:
-            merge_count = merge_count.get("Count", 3)
-        else:
-            merge_count = 3
-        for i in range(len(rows)):
-            if rows[i]["timestamp"] == c["timestamp"]:
-                rows = __merge_cells(rows=rows,
-                                     start=i,
-                                     count=merge_count,
-                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
-                break
-    return rows
-
-
-def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
-    with pg_client.PostgresClient() as cur:
-        rows = []
-        if event_type is None or event_type == schemas.EventType.CLICK:
-            cur.execute(cur.mogrify("""\
-                    SELECT
-                        c.*,
-                        'CLICK' AS type
-                    FROM events.clicks AS c
-                    WHERE
-                        c.session_id = %(session_id)s
-                    ORDER BY c.timestamp;""",
-                                    {"project_id": project_id, "session_id": session_id})
-                        )
-            rows += cur.fetchall()
-            if group_clickrage:
-                rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
-        if event_type is None or event_type == schemas.EventType.INPUT:
-            cur.execute(cur.mogrify("""
-                    SELECT
-                        i.*,
-                        'INPUT' AS type
-                    FROM events.inputs AS i
-                    WHERE
-                        i.session_id = %(session_id)s
-                    ORDER BY i.timestamp;""",
-                                    {"project_id": project_id, "session_id": session_id})
-                        )
-            rows += cur.fetchall()
-        if event_type is None or event_type == schemas.EventType.LOCATION:
-            cur.execute(cur.mogrify("""\
-                    SELECT
-                        l.*,
-                        l.path AS value,
-                        l.path AS url,
-                        'LOCATION' AS type
-                    FROM events.pages AS l
-                    WHERE
-                        l.session_id = %(session_id)s
-                    ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
-            rows += cur.fetchall()
-        rows = helper.list_to_camel_case(rows)
-    rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
-    return rows
-
-
-def _search_tags(project_id, value, key=None, source=None):
-    with pg_client.PostgresClient() as cur:
-        query = f"""
-            SELECT public.tags.name
-                   'TAG' AS type
-            FROM public.tags
-            WHERE public.tags.project_id = %(project_id)s
-            ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
-            LIMIT 10
-        """
-        query = cur.mogrify(query, {'project_id': project_id, 'value': value})
-        cur.execute(query)
-        results = helper.list_to_camel_case(cur.fetchall())
-        return results
-
-
-class EventType:
-    CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
-    INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
-    LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
-    CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
-    REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
-    GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
-    STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
-    TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
-    ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
-                  column=None)  # column=None because errors are searched by name or message
-    METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
-    # MOBILE
-    CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
-    INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
-    VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
-    SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
-    CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
-    REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
-    CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
-                         column=None)  # column=None because errors are searched by name or message
-
-
-@cache
-def supported_types():
-    return {
-        EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
-                                                 query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
-        EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
-                                                 query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
-        EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
-                                                    query=autocomplete.__generic_query(typename=EventType.LOCATION.ui_type)),
-        EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
-                                                  query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
-        EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
-                                                   query=autocomplete.__generic_query(typename=EventType.REQUEST.ui_type)),
-        EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
-                                                   query=autocomplete.__generic_query(typename=EventType.GRAPHQL.ui_type)),
-        EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
-                                                       query=autocomplete.__generic_query(typename=EventType.STATEACTION.ui_type)),
-        EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None),
-        EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
-                                                 query=None),
-        EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
-                                                    query=None),
-        # MOBILE
-        EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE),
-                                                        query=autocomplete.__generic_query(typename=EventType.CLICK_MOBILE.ui_type)),
-        EventType.SWIPE_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.SWIPE_MOBILE),
-                                                        query=autocomplete.__generic_query(typename=EventType.SWIPE_MOBILE.ui_type)),
-        EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE),
-                                                        query=autocomplete.__generic_query(typename=EventType.INPUT_MOBILE.ui_type)),
-        EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE),
-                                                       query=autocomplete.__generic_query(typename=EventType.VIEW_MOBILE.ui_type)),
-        EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(
-            get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE),
-            query=autocomplete.__generic_query(typename=EventType.CUSTOM_MOBILE.ui_type)),
-        EventType.REQUEST_MOBILE.ui_type: SupportedFilter(
-            get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE),
-            query=autocomplete.__generic_query(typename=EventType.REQUEST_MOBILE.ui_type)),
-        EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile,
-                                                        query=None),
-    }
-
-
-def get_errors_by_session_id(session_id, project_id):
-    with pg_client.PostgresClient() as cur:
-        cur.execute(cur.mogrify(f"""\
-            SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
-            FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
-            WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
-            ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
-        errors = cur.fetchall()
-        for e in errors:
-            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
-        return helper.list_to_camel_case(errors)
-
-
-def search(text, event_type, project_id, source, key):
-    if not event_type:
-        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
-
-    if event_type in supported_types().keys():
-        rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
-    elif event_type + "_MOBILE" in supported_types().keys():
-        rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
-    elif event_type in sessions_metas.supported_types().keys():
-        return sessions_metas.search(text, event_type, project_id)
-    elif event_type.endswith("_IOS") \
-            and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
-        return sessions_metas.search(text, event_type, project_id)
-    elif event_type.endswith("_MOBILE") \
-            and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
-        return sessions_metas.search(text, event_type, project_id)
-    else:
-        return {"errors": ["unsupported event"]}
-
-    return {"data": rows}
api/chalicelib/core/events/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+import logging
+
+from decouple import config
+
+logger = logging.getLogger(__name__)
+
+if config("EXP_EVENTS", cast=bool, default=False):
+    logger.info(">>> Using experimental events replay")
+    from . import events_ch as events
+else:
+    from . import events_pg as events
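With this toggle, callers never import a backend directly; they go through the package. A sketch of the assumed call site:

```python
from chalicelib.core.events import events  # events_ch or events_pg, per EXP_EVENTS

rows = events.get_by_session_id(session_id=123, project_id=1)
```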
api/chalicelib/core/events/events_ch.py (new file, 96 lines)
@@ -0,0 +1,96 @@
+from chalicelib.utils import ch_client
+from .events_pg import *
+from chalicelib.utils.exp_ch_helper import explode_dproperties, add_timestamp
+
+
+def get_customs_by_session_id(session_id, project_id):
+    with ch_client.ClickHouseClient() as cur:
+        rows = cur.execute("""\
+            SELECT `$properties`,
+                   properties,
+                   created_at,
+                   'CUSTOM' AS type,
+                   `$event_name` AS name
+            FROM product_analytics.events
+            WHERE session_id = %(session_id)s
+              AND NOT `$auto_captured`
+              AND `$event_name`!='INCIDENT'
+            ORDER BY created_at;""",
+                           {"project_id": project_id, "session_id": session_id})
+        rows = helper.list_to_camel_case(rows, ignore_keys=["properties"])
+        rows = explode_dproperties(rows)
+        rows = add_timestamp(rows)
+        return rows
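`explode_dproperties` and `add_timestamp` come from `exp_ch_helper`; judging by their use here, the former flattens the `$properties` JSON column into top-level keys. A hypothetical minimal version (assumption, not the real helper):

```python
def explode_dproperties(rows: list[dict]) -> list[dict]:
    # Hypothetical: lift every key of the `$properties` dict to the top level.
    for r in rows:
        props = r.pop("$properties", None) or {}
        r.update(props)
    return rows
```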
+
+
+def __merge_cells(rows, start, count, replacement):
+    rows[start] = replacement
+    rows = rows[:start + 1] + rows[start + count:]
+    return rows
+
+
+def __get_grouped_clickrage(rows, session_id, project_id):
+    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
+    if len(click_rage_issues) == 0:
+        return rows
+
+    for c in click_rage_issues:
+        merge_count = c.get("payload")
+        if merge_count is not None:
+            merge_count = merge_count.get("Count", 3)
+        else:
+            merge_count = 3
+        for i in range(len(rows)):
+            if rows[i]["created_at"] == c["createdAt"]:
+                rows = __merge_cells(rows=rows,
+                                     start=i,
+                                     count=merge_count,
+                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
+                break
+    return rows
+
+
+def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
+    with ch_client.ClickHouseClient() as cur:
+        select_events = ('CLICK', 'INPUT', 'LOCATION')
+        if event_type is not None:
+            select_events = (event_type,)
+        query = cur.format(query="""\
+            SELECT created_at,
+                   `$properties`,
+                   `$event_name` AS type
+            FROM product_analytics.events
+            WHERE session_id = %(session_id)s
+              AND `$event_name` IN %(select_events)s
+              AND `$auto_captured`
+            ORDER BY created_at;""",
+                           parameters={"project_id": project_id, "session_id": session_id,
+                                       "select_events": select_events})
+        rows = cur.execute(query)
+        rows = explode_dproperties(rows)
+        if group_clickrage and 'CLICK' in select_events:
+            rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
+
+        rows = helper.list_to_camel_case(rows)
+        rows = sorted(rows, key=lambda k: k["createdAt"])
+        rows = add_timestamp(rows)
+        return rows
+
+
+def get_incidents_by_session_id(session_id, project_id):
+    with ch_client.ClickHouseClient() as cur:
+        query = cur.format(query="""\
+            SELECT created_at,
+                   `$properties`,
+                   `$event_name` AS type
+            FROM product_analytics.events
+            WHERE session_id = %(session_id)s
+              AND `$event_name` = 'INCIDENT'
+              AND `$auto_captured`
+            ORDER BY created_at;""",
+                           parameters={"project_id": project_id, "session_id": session_id})
+        rows = cur.execute(query)
+        rows = explode_dproperties(rows)
+        rows = helper.list_to_camel_case(rows)
+        rows = sorted(rows, key=lambda k: k["createdAt"])
+        return rows
@@ -1,5 +1,5 @@
 from chalicelib.utils import pg_client, helper
-from chalicelib.core import events
+from . import events


 def get_customs_by_session_id(session_id, project_id):
@@ -58,7 +58,7 @@ def get_crashes_by_session_id(session_id):
 with pg_client.PostgresClient() as cur:
 cur.execute(cur.mogrify(f"""
 SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
-FROM {events.EventType.CRASH_MOBILE.table} AS cr
+FROM events_common.crashes AS cr
 INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
 INNER JOIN public.sessions AS s USING (session_id)
 WHERE
api/chalicelib/core/events/events_pg.py (new file, 209 lines)
@@ -0,0 +1,209 @@
+import logging
+from functools import cache
+from typing import Optional
+
+import schemas
+from chalicelib.core.autocomplete import autocomplete
+from chalicelib.core.issues import issues
+from chalicelib.core.sessions import sessions_metas
+from chalicelib.utils import pg_client, helper
+from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.utils.event_filter_definition import SupportedFilter
+
+logger = logging.getLogger(__name__)
+
+
+def get_customs_by_session_id(session_id, project_id):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(cur.mogrify("""\
+            SELECT c.*,
+                   'CUSTOM' AS type
+            FROM events_common.customs AS c
+            WHERE c.session_id = %(session_id)s
+            ORDER BY c.timestamp;""",
+                                {"project_id": project_id, "session_id": session_id})
+                    )
+        rows = cur.fetchall()
+        return helper.list_to_camel_case(rows)
+
+
+def __merge_cells(rows, start, count, replacement):
+    rows[start] = replacement
+    rows = rows[:start + 1] + rows[start + count:]
+    return rows
+
+
+def __get_grouped_clickrage(rows, session_id, project_id):
+    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
+    if len(click_rage_issues) == 0:
+        return rows
+
+    for c in click_rage_issues:
+        merge_count = c.get("payload")
+        if merge_count is not None:
+            merge_count = merge_count.get("Count", 3)
+        else:
+            merge_count = 3
+        for i in range(len(rows)):
+            if rows[i]["timestamp"] == c["timestamp"]:
+                rows = __merge_cells(rows=rows,
+                                     start=i,
+                                     count=merge_count,
+                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
+                break
+    return rows
+
+
+def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
+    with pg_client.PostgresClient() as cur:
+        rows = []
+        if event_type is None or event_type == schemas.EventType.CLICK:
+            cur.execute(cur.mogrify(""" \
+                SELECT c.*,
+                       'CLICK' AS type
+                FROM events.clicks AS c
+                WHERE c.session_id = %(session_id)s
+                ORDER BY c.timestamp;""",
+                                    {"project_id": project_id, "session_id": session_id})
+                        )
+            rows += cur.fetchall()
+            if group_clickrage:
+                rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
+        if event_type is None or event_type == schemas.EventType.INPUT:
+            cur.execute(cur.mogrify("""
+                SELECT i.*,
+                       'INPUT' AS type
+                FROM events.inputs AS i
+                WHERE i.session_id = %(session_id)s
+                ORDER BY i.timestamp;""",
+                                    {"project_id": project_id, "session_id": session_id})
+                        )
+            rows += cur.fetchall()
+        if event_type is None or event_type == schemas.EventType.LOCATION:
+            cur.execute(cur.mogrify(""" \
+                SELECT l.*,
+                       l.path AS value,
+                       l.path AS url,
+                       'LOCATION' AS type
+                FROM events.pages AS l
+                WHERE
+                    l.session_id = %(session_id)s
+                ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
+            rows += cur.fetchall()
+        rows = helper.list_to_camel_case(rows)
+        rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
+        return rows
+
+
+def _search_tags(project_id, value, key=None, source=None):
+    with pg_client.PostgresClient() as cur:
+        query = f"""
+            SELECT public.tags.name,
+                   'TAG' AS type
+            FROM public.tags
+            WHERE public.tags.project_id = %(project_id)s
+            ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
+            LIMIT 10
+        """
+        query = cur.mogrify(query, {'project_id': project_id, 'value': value})
+        cur.execute(query)
+        results = helper.list_to_camel_case(cur.fetchall())
+        return results
+
+
+@cache
+def supported_types():
+    return {
+        schemas.EventType.CLICK: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK),
+                                                 query=autocomplete.__generic_query(typename=schemas.EventType.CLICK)),
+        schemas.EventType.INPUT: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT),
+                                                 query=autocomplete.__generic_query(typename=schemas.EventType.INPUT)),
+        schemas.EventType.LOCATION: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.LOCATION),
+                                                    query=autocomplete.__generic_query(typename=schemas.EventType.LOCATION)),
+        schemas.EventType.CUSTOM: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM),
+                                                  query=autocomplete.__generic_query(typename=schemas.EventType.CUSTOM)),
+        schemas.EventType.REQUEST: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST),
+                                                   query=autocomplete.__generic_query(typename=schemas.EventType.REQUEST)),
+        schemas.EventType.GRAPHQL: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.GRAPHQL),
+                                                   query=autocomplete.__generic_query(typename=schemas.EventType.GRAPHQL)),
+        schemas.EventType.STATE_ACTION: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.STATE_ACTION),
+            query=autocomplete.__generic_query(typename=schemas.EventType.STATE_ACTION)),
+        schemas.EventType.TAG: SupportedFilter(get=_search_tags, query=None),
+        schemas.EventType.ERROR: SupportedFilter(get=autocomplete.__search_errors,
+                                                 query=None),
+        schemas.FilterType.METADATA: SupportedFilter(get=autocomplete.__search_metadata,
+                                                     query=None),
+        # MOBILE
+        schemas.EventType.CLICK_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.CLICK_MOBILE)),
+        schemas.EventType.SWIPE_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.SWIPE_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.SWIPE_MOBILE)),
+        schemas.EventType.INPUT_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.INPUT_MOBILE)),
+        schemas.EventType.VIEW_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.VIEW_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.VIEW_MOBILE)),
+        schemas.EventType.CUSTOM_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.CUSTOM_MOBILE)),
+        schemas.EventType.REQUEST_MOBILE: SupportedFilter(
+            get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST_MOBILE),
+            query=autocomplete.__generic_query(typename=schemas.EventType.REQUEST_MOBILE)),
+        schemas.EventType.ERROR_MOBILE: SupportedFilter(get=autocomplete.__search_errors_mobile,
+                                                        query=None),
+    }
+
+
+def get_errors_by_session_id(session_id, project_id):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(cur.mogrify(f"""\
+            SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
+            FROM events.errors AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
+            WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
+            ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
+        errors = cur.fetchall()
+        for e in errors:
+            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
+        return helper.list_to_camel_case(errors)
+
+
+def get_incidents_by_session_id(session_id, project_id):
+    logger.warning("INCIDENTS not supported in PG")
+    return []
+
+
+def search(text, event_type, project_id, source, key):
+    if not event_type:
+        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
+
+    if event_type in supported_types().keys():
+        rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
+    elif event_type + "_MOBILE" in supported_types().keys():
+        rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
+    elif event_type in sessions_metas.supported_types().keys():
+        return sessions_metas.search(text, event_type, project_id)
+    elif event_type.endswith("_IOS") \
+            and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
+        return sessions_metas.search(text, event_type, project_id)
+    elif event_type.endswith("_MOBILE") \
+            and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
+        return sessions_metas.search(text, event_type, project_id)
+    else:
+        return {"errors": ["unsupported event"]}
+
+    return {"data": rows}
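Example of the dispatch above (hypothetical call): CLICK is a key of `supported_types()`, so its autocomplete getter runs; anything unknown falls through to the error branch.

```python
result = search(text="checkout", event_type="CLICK", project_id=1, source=None, key=None)
# -> {"data": [...]} for supported types, {"errors": ["unsupported event"]} otherwise
```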
@@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
 cur.execute(
 cur.mogrify(
 """SELECT username, token, url
-FROM public.jira_cloud
-WHERE user_id=%(user_id)s;""",
+FROM public.jira_cloud
+WHERE user_id = %(user_id)s;""",
 {"user_id": self._user_id})
 )
 data = helper.dict_to_camel_case(cur.fetchone())
@@ -95,10 +95,9 @@ class JIRAIntegration(base.BaseIntegration):
 def add(self, username, token, url, obfuscate=False):
 with pg_client.PostgresClient() as cur:
 cur.execute(
-cur.mogrify("""\
-INSERT INTO public.jira_cloud(username, token, user_id,url)
-VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
-RETURNING username, token, url;""",
+cur.mogrify(""" \
+INSERT INTO public.jira_cloud(username, token, user_id, url)
+VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
 {"user_id": self._user_id, "username": username,
 "token": token, "url": url})
 )
@@ -112,9 +111,10 @@ class JIRAIntegration(base.BaseIntegration):
 def delete(self):
 with pg_client.PostgresClient() as cur:
 cur.execute(
-cur.mogrify("""\
-DELETE FROM public.jira_cloud
-WHERE user_id=%(user_id)s;""",
+cur.mogrify(""" \
+DELETE
+FROM public.jira_cloud
+WHERE user_id = %(user_id)s;""",
 {"user_id": self._user_id})
 )
 return {"state": "success"}
@@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
 changes={
 "username": data.username,
 "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
-else self.integration.token,
+else self.integration["token"],
 "url": str(data.url)
 },
 obfuscate=True
api/chalicelib/core/issues/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+import logging
+
+from decouple import config
+
+logger = logging.getLogger(__name__)
+
+if config("EXP_EVENTS", cast=bool, default=False):
+    logger.info(">>> Using experimental issues")
+    from . import issues_ch as issues
+else:
+    from . import issues_pg as issues
api/chalicelib/core/issues/issues_ch.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+from chalicelib.utils import ch_client, helper
+import datetime
+from chalicelib.utils.exp_ch_helper import explode_dproperties, add_timestamp
+
+
+def get(project_id, issue_id):
+    with ch_client.ClickHouseClient() as cur:
+        query = cur.format(query="""\
+            SELECT *
+            FROM product_analytics.events
+            WHERE project_id = %(project_id)s
+              AND issue_id = %(issue_id)s;""",
+                           parameters={"project_id": project_id, "issue_id": issue_id})
+        data = cur.execute(query=query)
+        if data is not None and len(data) > 0:
+            data = data[0]
+            data["title"] = helper.get_issue_title(data["type"])
+        return helper.dict_to_camel_case(data)
+
+
+def get_by_session_id(session_id, project_id, issue_type=None):
+    with ch_client.ClickHouseClient() as cur:
+        query = cur.format(query=f"""\
+            SELECT created_at, `$properties`
+            FROM product_analytics.events
+            WHERE session_id = %(session_id)s
+              AND project_id= %(project_id)s
+              AND `$event_name`='ISSUE'
+              {"AND issue_type = %(type)s" if issue_type is not None else ""}
+            ORDER BY created_at;""",
+                           parameters={"session_id": session_id, "project_id": project_id, "type": issue_type})
+        rows = cur.execute(query)
+        rows = explode_dproperties(rows)
+        rows = helper.list_to_camel_case(rows)
+        rows = add_timestamp(rows)
+        return rows
+
+
+# To reduce the number of issues in the replay;
+# will be removed once we agree on how to show issues
+def reduce_issues(issues_list):
+    if issues_list is None:
+        return None
+    i = 0
+    # remove same-type issues if the time between them is <2s
+    while i < len(issues_list) - 1:
+        for j in range(i + 1, len(issues_list)):
+            if issues_list[i]["issueType"] == issues_list[j]["issueType"]:
+                break
+        else:
+            i += 1
+            continue
+
+        if issues_list[j]["createdAt"] - issues_list[i]["createdAt"] < datetime.timedelta(seconds=2):
+            issues_list.pop(j)
+        else:
+            i += 1
+
+    return issues_list
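A tiny worked example for `reduce_issues` with hypothetical data, assuming the function above is in scope: the second click_rage issue arrives 1s after the first and is dropped; the third, 10s later, survives.

```python
import datetime

t0 = datetime.datetime(2024, 1, 1)
issues_list = [
    {"issueType": "click_rage", "createdAt": t0},
    {"issueType": "click_rage", "createdAt": t0 + datetime.timedelta(seconds=1)},
    {"issueType": "click_rage", "createdAt": t0 + datetime.timedelta(seconds=10)},
]
assert len(reduce_issues(issues_list)) == 2
```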
@@ -4,12 +4,11 @@ from chalicelib.utils import pg_client, helper
 def get(project_id, issue_id):
 with pg_client.PostgresClient() as cur:
 query = cur.mogrify(
-"""\
-SELECT
-*
+""" \
+SELECT *
 FROM public.issues
 WHERE project_id = %(project_id)s
-AND issue_id = %(issue_id)s;""",
+  AND issue_id = %(issue_id)s;""",
 {"project_id": project_id, "issue_id": issue_id}
 )
 cur.execute(query=query)
@@ -35,6 +34,29 @@ def get_by_session_id(session_id, project_id, issue_type=None):
 return helper.list_to_camel_case(cur.fetchall())


+# To reduce the number of issues in the replay;
+# will be removed once we agree on how to show issues
+def reduce_issues(issues_list):
+    if issues_list is None:
+        return None
+    i = 0
+    # remove same-type issues if the time between them is <2s
+    while i < len(issues_list) - 1:
+        for j in range(i + 1, len(issues_list)):
+            if issues_list[i]["type"] == issues_list[j]["type"]:
+                break
+        else:
+            i += 1
+            continue
+
+        if issues_list[j]["timestamp"] - issues_list[i]["timestamp"] < 2000:
+            issues_list.pop(j)
+        else:
+            i += 1
+
+    return issues_list
+
+
 def get_all_types():
 return [
 {
@@ -241,3 +241,24 @@ def get_colname_by_key(project_id, key):
 return None

 return index_to_colname(meta_keys[key])
+
+
+def get_for_filters(project_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""SELECT {",".join(column_names())}
+                                FROM public.projects
+                                WHERE project_id = %(project_id)s
+                                  AND deleted_at ISNULL
+                                LIMIT 1;""", {"project_id": project_id})
+        cur.execute(query=query)
+        metas = cur.fetchone()
+        results = []
+        if metas is not None:
+            for i, k in enumerate(metas.keys()):
+                if metas[k] is not None:
+                    results.append({"id": f"meta_{i}",
+                                    "name": k,
+                                    "displayName": metas[k],
+                                    "possibleTypes": ["String"],
+                                    "autoCaptured": False})
+        return {"total": len(results), "list": results}
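`get_colname_by_key` and `get_for_filters` lean on `index_to_colname` from the same module; a minimal sketch of what it presumably does (assumption, not the repository's code):

```python
def index_to_colname(index: int) -> str:
    # Hypothetical mapping from a metadata index to its sessions column.
    return f"metadata_{index}"
```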
@@ -4,7 +4,7 @@ import logging
 from fastapi import HTTPException, status

 import schemas
-from chalicelib.core import issues
+from chalicelib.core.issues import issues
 from chalicelib.core.errors import errors
 from chalicelib.core.metrics import heatmaps, product_analytics, funnels
 from chalicelib.core.sessions import sessions, sessions_search
@@ -61,6 +61,9 @@ def get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.C
 return None
 data.series[0].filter.filters += data.series[0].filter.events
 data.series[0].filter.events = []
+print(">>>>>>>>>>>>>>>>>>>>>>>>><")
+print(data.series[0].filter.model_dump())
+print(">>>>>>>>>>>>>>>>>>>>>>>>><")
 return heatmaps.search_short_session(project_id=project.project_id, user_id=user_id,
 data=schemas.HeatMapSessionsSearch(
 **data.series[0].filter.model_dump()),
@@ -169,7 +172,8 @@ def get_sessions_by_card_id(project: schemas.ProjectContext, user_id, metric_id,
 results = []
 for s in data.series:
 results.append({"seriesId": s.series_id, "seriesName": s.name,
-**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id)})
+**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id,
+                                  metric_of=data.metric_of)})

 return results
@@ -184,7 +188,8 @@ def get_sessions(project: schemas.ProjectContext, user_id, data: schemas.CardSes
 s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))

 results.append({"seriesId": None, "seriesName": s.name,
-**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id)})
+**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id,
+                                  metric_of=data.metric_of)})

 return results
@@ -241,14 +246,13 @@ def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSche
 params["card_info"] = json.dumps(params["card_info"])

 query = """INSERT INTO metrics (project_id, user_id, name, is_public,
-view_type, metric_type, metric_of, metric_value,
-metric_format, default_config, thumbnail, data,
-card_info)
-VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
-%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
-%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
-%(card_info)s)
-RETURNING metric_id"""
+view_type, metric_type, metric_of, metric_value,
+metric_format, default_config, thumbnail, data,
+card_info)
+VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
+%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
+%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
+%(card_info)s) RETURNING metric_id"""
 if len(data.series) > 0:
 query = f"""WITH m AS ({query})
 INSERT INTO metric_series(metric_id, index, name, filter)
@@ -525,13 +529,13 @@ def get_all(project_id, user_id):
 def delete_card(project_id, metric_id, user_id):
 with pg_client.PostgresClient() as cur:
 cur.execute(
-cur.mogrify("""\
-UPDATE public.metrics
-SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
-WHERE project_id = %(project_id)s
-AND metric_id = %(metric_id)s
-AND (user_id = %(user_id)s OR is_public)
-RETURNING data;""",
+cur.mogrify(""" \
+UPDATE public.metrics
+SET deleted_at = timezone('utc'::text, now()),
+    edited_at = timezone('utc'::text, now())
+WHERE project_id = %(project_id)s
+  AND metric_id = %(metric_id)s
+  AND (user_id = %(user_id)s OR is_public) RETURNING data;""",
 {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
 )
@@ -615,13 +619,14 @@ def get_series_for_alert(project_id, user_id):
 FALSE AS predefined,
 metric_id,
 series_id
-FROM metric_series
-INNER JOIN metrics USING (metric_id)
-WHERE metrics.deleted_at ISNULL
-AND metrics.project_id = %(project_id)s
-AND metrics.metric_type = 'timeseries'
-AND (user_id = %(user_id)s OR is_public)
-ORDER BY name;""",
+FROM metric_series
+     INNER JOIN metrics USING (metric_id)
+WHERE metrics.deleted_at ISNULL
+  AND metrics.project_id = %(project_id)s
+  AND metrics.metric_type = 'timeseries'
+  AND (user_id = %(user_id)s
+       OR is_public)
+ORDER BY name;""",
 {"project_id": project_id, "user_id": user_id}
 )
 )
@@ -632,11 +637,11 @@ def change_state(project_id, metric_id, user_id, status):
 with pg_client.PostgresClient() as cur:
 cur.execute(
-cur.mogrify("""\
-UPDATE public.metrics
-SET active = %(status)s
-WHERE metric_id = %(metric_id)s
-AND (user_id = %(user_id)s OR is_public);""",
+cur.mogrify(""" \
+UPDATE public.metrics
+SET active = %(status)s
+WHERE metric_id = %(metric_id)s
+  AND (user_id = %(user_id)s OR is_public);""",
 {"metric_id": metric_id, "status": status, "user_id": user_id})
 )
 return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
@@ -674,7 +679,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
 "issue": issue}


-def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema):
+def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id,
+                         data: schemas.CardSessionsSchema, for_dashboard: bool = False):
 raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True)

 if raw_metric is None:
@@ -693,7 +699,8 @@ def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, da
 return heatmaps.search_short_session(project_id=project.project_id,
 data=schemas.HeatMapSessionsSearch(**metric.model_dump()),
 user_id=user_id)
-
+elif metric.metric_type == schemas.MetricType.PATH_ANALYSIS and for_dashboard:
+    metric.hide_excess = True
 return get_chart(project=project, data=metric, user_id=user_id)
@@ -6,7 +6,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import sql_helper as sh


-def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
+def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
 ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
 schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
 schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
@@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema]):


 def __parse_events(f_events: List[dict]):
-    return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
+    return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events]


-def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
+def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]):
 if f_events is None:
 return
 events = []
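`filter_stages` keeps only stages whose type appears in ALLOW_TYPES. A minimal equivalent sketch (the real list also includes the mobile event types shown above, and `schemas` is assumed importable as in the module itself):

```python
import schemas

def keep_allowed_stages(stages):
    # Sketch of the filtering filter_stages performs.
    allow = {schemas.EventType.CLICK, schemas.EventType.INPUT,
             schemas.EventType.LOCATION, schemas.EventType.CUSTOM}
    return [s for s in stages if s.type in allow]
```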
@@ -160,7 +160,7 @@ s.start_ts,
 s.duration"""


-def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
+def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
 start_time: int,
 end_time: int) -> str | None:
 full_args = {
@@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
 value=[schemas.PlatformType.DESKTOP],
 operator=schemas.SearchEventOperator.IS))
 if not location_condition:
-data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
-                                                     value=[],
-                                                     operator=schemas.SearchEventOperator.IS_ANY))
+data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
+                                                    value=[],
+                                                    operator=schemas.SearchEventOperator.IS_ANY))
 if no_click:
-data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
-                                                     value=[],
-                                                     operator=schemas.SearchEventOperator.IS_ANY))
+data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
+                                                    value=[],
+                                                    operator=schemas.SearchEventOperator.IS_ANY))

 data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
 value=[0],
@@ -3,7 +3,7 @@ import logging
 from decouple import config

 import schemas
-from chalicelib.core import events
+from chalicelib.core.events import events
 from chalicelib.core.metrics.modules import sessions, sessions_mobs
 from chalicelib.utils import sql_helper as sh
@@ -24,8 +24,9 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
 "main_events.`$event_name` = 'CLICK'",
 "isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
 ]

-if data.operator == schemas.SearchEventOperator.IS:
+if data.operator == schemas.SearchEventOperator.PATTERN:
+    constraints.append("match(main_events.`$properties`.url_path.:String, %(url)s)")
+elif data.operator == schemas.SearchEventOperator.IS:
 constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
 else:
 constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")
@@ -179,7 +180,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
 s.duration AS duration"""


-def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
+def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
 start_time: int,
 end_time: int) -> str | None:
 full_args = {
@@ -262,13 +263,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
 value=[schemas.PlatformType.DESKTOP],
 operator=schemas.SearchEventOperator.IS))
 if not location_condition:
-data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
-                                                     value=[],
-                                                     operator=schemas.SearchEventOperator.IS_ANY))
+data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
+                                                    value=[],
+                                                    operator=schemas.SearchEventOperator.IS_ANY))
 if no_click:
-data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
-                                                     value=[],
-                                                     operator=schemas.SearchEventOperator.IS_ANY))
+data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
+                                                    value=[],
+                                                    operator=schemas.SearchEventOperator.IS_ANY))

 data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
 value=[0],
@@ -7,7 +7,8 @@ from typing import List
 from psycopg2.extras import RealDictRow

 import schemas
-from chalicelib.core import events, metadata
+from chalicelib.core import metadata
+from chalicelib.core.events import events
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils import sql_helper as sh
@@ -76,10 +77,10 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
             values["maxDuration"] = f.value[1]
         elif filter_type == schemas.FilterType.REFERRER:
             # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
-            filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
+            filter_extra_from = [f"INNER JOIN {"events.pages"} AS p USING(session_id)"]
             first_stage_extra_constraints.append(
                 sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
-        elif filter_type == events.EventType.METADATA.ui_type:
+        elif filter_type == schemas.FilterType.METADATA:
             if meta_keys is None:
                 meta_keys = metadata.get(project_id=project_id)
                 meta_keys = {m["key"]: m["index"] for m in meta_keys}
@@ -121,31 +122,31 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
         op = sh.get_sql_operator(s.operator)
         # event_type = s["type"].upper()
         event_type = s.type
-        if event_type == events.EventType.CLICK.ui_type:
-            next_table = events.EventType.CLICK.table
-            next_col_name = events.EventType.CLICK.column
-        elif event_type == events.EventType.INPUT.ui_type:
-            next_table = events.EventType.INPUT.table
-            next_col_name = events.EventType.INPUT.column
-        elif event_type == events.EventType.LOCATION.ui_type:
-            next_table = events.EventType.LOCATION.table
-            next_col_name = events.EventType.LOCATION.column
-        elif event_type == events.EventType.CUSTOM.ui_type:
-            next_table = events.EventType.CUSTOM.table
-            next_col_name = events.EventType.CUSTOM.column
+        if event_type == schemas.EventType.CLICK:
+            next_table = "events.clicks"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT:
+            next_table = "events.inputs"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.LOCATION:
+            next_table = "events.pages"
+            next_col_name = "path"
+        elif event_type == schemas.EventType.CUSTOM:
+            next_table = "events_common.customs"
+            next_col_name = "name"
         # IOS --------------
-        elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-            next_table = events.EventType.CLICK_MOBILE.table
-            next_col_name = events.EventType.CLICK_MOBILE.column
-        elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-            next_table = events.EventType.INPUT_MOBILE.table
-            next_col_name = events.EventType.INPUT_MOBILE.column
-        elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-            next_table = events.EventType.VIEW_MOBILE.table
-            next_col_name = events.EventType.VIEW_MOBILE.column
-        elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-            next_table = events.EventType.CUSTOM_MOBILE.table
-            next_col_name = events.EventType.CUSTOM_MOBILE.column
+        elif event_type == schemas.EventType.CLICK_MOBILE:
+            next_table = "events_ios.taps"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT_MOBILE:
+            next_table = "events_ios.inputs"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.VIEW_MOBILE:
+            next_table = "events_ios.views"
+            next_col_name = "name"
+        elif event_type == schemas.EventType.CUSTOM_MOBILE:
+            next_table = "events_common.customs"
+            next_col_name = "name"
         else:
             logger.warning(f"=================UNDEFINED:{event_type}")
             continue
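The if/elif chain above is a static mapping from event type to a (table, column) pair; a sketch of the same data as a lookup table, under the assumption that only these eight types are handled:

```python
import schemas

# Sketch: the chain rewritten as a dict; unknown types fall through to
# (None, None), mirroring the logger.warning/continue branch.
EVENT_TARGETS = {
    schemas.EventType.CLICK: ("events.clicks", "label"),
    schemas.EventType.INPUT: ("events.inputs", "label"),
    schemas.EventType.LOCATION: ("events.pages", "path"),
    schemas.EventType.CUSTOM: ("events_common.customs", "name"),
    schemas.EventType.CLICK_MOBILE: ("events_ios.taps", "label"),
    schemas.EventType.INPUT_MOBILE: ("events_ios.inputs", "label"),
    schemas.EventType.VIEW_MOBILE: ("events_ios.views", "name"),
    schemas.EventType.CUSTOM_MOBILE: ("events_common.customs", "name"),
}

next_table, next_col_name = EVENT_TARGETS.get(event_type, (None, None))
```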
@@ -241,7 +242,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
     :return:
     """

-    stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
+    stages: List[schemas.SessionSearchEventSchema] = filter_d.events
     filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters

     stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
@@ -297,10 +298,10 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
             values["maxDuration"] = f.value[1]
         elif filter_type == schemas.FilterType.REFERRER:
             # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
-            filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
+            filter_extra_from = [f"INNER JOIN {"events.pages"} AS p USING(session_id)"]
             first_stage_extra_constraints.append(
                 sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
-        elif filter_type == events.EventType.METADATA.ui_type:
+        elif filter_type == schemas.FilterType.METADATA:
             if meta_keys is None:
                 meta_keys = metadata.get(project_id=project.project_id)
                 meta_keys = {m["key"]: m["index"] for m in meta_keys}
@@ -342,31 +343,31 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
         op = sh.get_sql_operator(s.operator)
         # event_type = s["type"].upper()
         event_type = s.type
-        if event_type == events.EventType.CLICK.ui_type:
-            next_table = events.EventType.CLICK.table
-            next_col_name = events.EventType.CLICK.column
-        elif event_type == events.EventType.INPUT.ui_type:
-            next_table = events.EventType.INPUT.table
-            next_col_name = events.EventType.INPUT.column
-        elif event_type == events.EventType.LOCATION.ui_type:
-            next_table = events.EventType.LOCATION.table
-            next_col_name = events.EventType.LOCATION.column
-        elif event_type == events.EventType.CUSTOM.ui_type:
-            next_table = events.EventType.CUSTOM.table
-            next_col_name = events.EventType.CUSTOM.column
+        if event_type == schemas.EventType.CLICK:
+            next_table = "events.clicks"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT:
+            next_table = "events.inputs"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.LOCATION:
+            next_table = "events.pages"
+            next_col_name = "path"
+        elif event_type == schemas.EventType.CUSTOM:
+            next_table = "events_common.customs"
+            next_col_name = "name"
         # IOS --------------
-        elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-            next_table = events.EventType.CLICK_MOBILE.table
-            next_col_name = events.EventType.CLICK_MOBILE.column
-        elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-            next_table = events.EventType.INPUT_MOBILE.table
-            next_col_name = events.EventType.INPUT_MOBILE.column
-        elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-            next_table = events.EventType.VIEW_MOBILE.table
-            next_col_name = events.EventType.VIEW_MOBILE.column
-        elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-            next_table = events.EventType.CUSTOM_MOBILE.table
-            next_col_name = events.EventType.CUSTOM_MOBILE.column
+        elif event_type == schemas.EventType.CLICK_MOBILE:
+            next_table = "events_ios.taps"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT_MOBILE:
+            next_table = "events_ios.inputs"
+            next_col_name = "label"
+        elif event_type == schemas.EventType.VIEW_MOBILE:
+            next_table = "events_ios.views"
+            next_col_name = "name"
+        elif event_type == schemas.EventType.CUSTOM_MOBILE:
+            next_table = "events_common.customs"
+            next_col_name = "name"
         else:
             logger.warning(f"=================UNDEFINED:{event_type}")
             continue
@@ -8,14 +8,14 @@ from chalicelib.utils import ch_client
 from chalicelib.utils import exp_ch_helper
 from chalicelib.utils import helper
 from chalicelib.utils import sql_helper as sh
-from chalicelib.core import events
+from chalicelib.core.events import events

 logger = logging.getLogger(__name__)


 def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
                       metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
-    stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
+    stages: List[schemas.SessionSearchEventSchema] = filter_d.events
     filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
     platform = project.platform
     constraints = ["e.project_id = %(project_id)s",
@@ -82,7 +82,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
         elif filter_type == schemas.FilterType.REFERRER:
             constraints.append(
                 sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
-        elif filter_type == events.EventType.METADATA.ui_type:
+        elif filter_type == schemas.FilterType.METADATA:
             if meta_keys is None:
                 meta_keys = metadata.get(project_id=project.project_id)
                 meta_keys = {m["key"]: m["index"] for m in meta_keys}
@@ -125,29 +125,29 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
         e_k = f"e_value{i}"
         event_type = s.type
         next_event_type = exp_ch_helper.get_event_type(event_type, platform=platform)
-        if event_type == events.EventType.CLICK.ui_type:
+        if event_type == schemas.EventType.CLICK:
             if platform == "web":
-                next_col_name = events.EventType.CLICK.column
+                next_col_name = "label"
                 if not is_any:
                     if schemas.ClickEventExtraOperator.has_value(s.operator):
                         specific_condition = sh.multi_conditions(f"selector {op} %({e_k})s", s.value, value_key=e_k)
             else:
-                next_col_name = events.EventType.CLICK_MOBILE.column
-        elif event_type == events.EventType.INPUT.ui_type:
-            next_col_name = events.EventType.INPUT.column
-        elif event_type == events.EventType.LOCATION.ui_type:
+                next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT:
+            next_col_name = "label"
+        elif event_type == schemas.EventType.LOCATION:
             next_col_name = 'url_path'
-        elif event_type == events.EventType.CUSTOM.ui_type:
-            next_col_name = events.EventType.CUSTOM.column
+        elif event_type == schemas.EventType.CUSTOM:
+            next_col_name = "name"
         # IOS --------------
-        elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-            next_col_name = events.EventType.CLICK_MOBILE.column
-        elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-            next_col_name = events.EventType.INPUT_MOBILE.column
-        elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-            next_col_name = events.EventType.VIEW_MOBILE.column
-        elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-            next_col_name = events.EventType.CUSTOM_MOBILE.column
+        elif event_type == schemas.EventType.CLICK_MOBILE:
+            next_col_name = "label"
+        elif event_type == schemas.EventType.INPUT_MOBILE:
+            next_col_name = "label"
+        elif event_type == schemas.EventType.VIEW_MOBILE:
+            next_col_name = "name"
+        elif event_type == schemas.EventType.CUSTOM_MOBILE:
+            next_col_name = "name"
         else:
             logger.warning(f"=================UNDEFINED:{event_type}")
             continue
@@ -85,6 +85,9 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
 # compute avg_time_from_previous at the same level as sessions_count (this was removed in v1.22)
 # if start-point is selected, the selected event is ranked n°1
 def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
+    if not data.hide_excess:
+        data.hide_excess = True
+        data.rows = 50
     sub_events = []
     start_points_conditions = []
     step_0_conditions = []
@@ -1,14 +0,0 @@
-from chalicelib.utils.ch_client import ClickHouseClient
-
-
-def search_events(project_id: int, data: dict):
-    with ClickHouseClient() as ch_client:
-        r = ch_client.format(
-            """SELECT *
-               FROM taha.events
-               WHERE project_id=%(project_id)s
-               ORDER BY created_at;""",
-            params={"project_id": project_id})
-        x = ch_client.execute(r)
-
-        return x
@@ -6,6 +6,7 @@ from decouple import config
 import schemas
 from chalicelib.core.collaborations.collaboration_msteams import MSTeams
 from chalicelib.core.collaborations.collaboration_slack import Slack
+from chalicelib.core.modules import TENANT_CONDITION
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils import sql_helper as sh
 from chalicelib.utils.TimeUTC import TimeUTC
@@ -16,13 +17,13 @@ logger = logging.getLogger(__name__)
 def get_note(tenant_id, project_id, user_id, note_id, share=None):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
-                                {",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
+                                {f",(SELECT name FROM users WHERE {TENANT_CONDITION} AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
                                 FROM sessions_notes INNER JOIN users USING (user_id)
                                 WHERE sessions_notes.project_id = %(project_id)s
                                   AND sessions_notes.note_id = %(note_id)s
                                   AND sessions_notes.deleted_at IS NULL
                                   AND (sessions_notes.user_id = %(user_id)s
-                                    OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s);""",
+                                    OR sessions_notes.is_public AND {TENANT_CONDITION});""",
                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
                             "note_id": note_id, "share": share})
@@ -43,7 +44,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
                                   AND sessions_notes.deleted_at IS NULL
                                   AND sessions_notes.session_id = %(session_id)s
                                   AND (sessions_notes.user_id = %(user_id)s
-                                    OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)
+                                    OR sessions_notes.is_public AND {TENANT_CONDITION})
                                 ORDER BY created_at DESC;""",
                            {"project_id": project_id, "user_id": user_id,
                             "tenant_id": tenant_id, "session_id": session_id})
@@ -62,7 +63,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
     conditions = [
         "sessions_notes.project_id = %(project_id)s",
         "sessions_notes.deleted_at IS NULL",
-        "users.tenant_id = %(tenant_id)s"
+        TENANT_CONDITION
     ]
     params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
@@ -127,7 +128,7 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
                                 VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
-                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
+                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
                            {"user_id": user_id, "project_id": project_id, "session_id": session_id,
                             **data.model_dump(),
                             "tenant_id": tenant_id})
@@ -161,7 +162,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
                                   AND user_id = %(user_id)s
                                   AND note_id = %(note_id)s
                                   AND deleted_at ISNULL
-                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
+                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
                            {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(),
                             "tenant_id": tenant_id})
                            )
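Every tenant check in this file now comes from the shared `TENANT_CONDITION` constant instead of a hand-written `users.tenant_id = %(tenant_id)s`. A sketch of how such a constant composes into the mogrify'd SQL; the constant's real definition lives in `chalicelib.core.modules` and is not shown in this diff, so the value below is an assumption:

```python
# Assumption: TENANT_CONDITION is a SQL fragment like the one it replaces.
TENANT_CONDITION = "users.tenant_id = %(tenant_id)s"

query = f"""SELECT name
            FROM users
            WHERE user_id = %(user_id)s
              AND {TENANT_CONDITION};"""
# The %(tenant_id)s placeholder is still bound by psycopg2, so the shared
# fragment stays parameterized and injection-safe:
# cur.mogrify(query, {"user_id": 1, "tenant_id": 42})
```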
0
api/chalicelib/core/product_analytics/__init__.py
Normal file
59
api/chalicelib/core/product_analytics/autocomplete.py
Normal file
@@ -0,0 +1,59 @@
from typing import Optional

from chalicelib.utils import helper
from chalicelib.utils.ch_client import ClickHouseClient


def search_events(project_id: int, q: Optional[str] = None):
    with ClickHouseClient() as ch_client:
        full_args = {"project_id": project_id, "limit": 20}

        constraints = ["project_id = %(project_id)s",
                       "_timestamp >= now()-INTERVAL 1 MONTH"]
        if q:
            constraints += ["value ILIKE %(q)s"]
            full_args["q"] = helper.string_to_sql_like(q)
        query = ch_client.format(
            f"""SELECT value,data_count
                FROM product_analytics.autocomplete_events_grouped
                WHERE {" AND ".join(constraints)}
                ORDER BY data_count DESC
                LIMIT %(limit)s;""",
            parameters=full_args)
        rows = ch_client.execute(query)

        return {"values": helper.list_to_camel_case(rows), "_src": 2}


def search_properties(project_id: int, property_name: Optional[str] = None, event_name: Optional[str] = None,
                      q: Optional[str] = None):
    with ClickHouseClient() as ch_client:
        select = "value, data_count"
        grouping = ""
        full_args = {"project_id": project_id, "limit": 20,
                     "event_name": event_name, "property_name": property_name,
                     "q_l": helper.string_to_sql_like(q)}

        constraints = ["project_id = %(project_id)s",
                       "_timestamp >= now()-INTERVAL 1 MONTH",
                       "property_name = %(property_name)s"]
        if event_name:
            constraints += ["event_name = %(event_name)s"]
        else:
            select = "value, sum(aepg.data_count) AS data_count"
            grouping = "GROUP BY 1"

        if q:
            constraints += ["value ILIKE %(q_l)s"]

        query = ch_client.format(
            f"""SELECT {select}
                FROM product_analytics.autocomplete_event_properties_grouped AS aepg
                WHERE {" AND ".join(constraints)}
                {grouping}
                ORDER BY data_count DESC
                LIMIT %(limit)s;""",
            parameters=full_args)
        rows = ch_client.execute(query)

        return {"events": helper.list_to_camel_case(rows), "_src": 2}
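A usage sketch for the new autocomplete module (the argument values are made up for illustration):

```python
from chalicelib.core.product_analytics import autocomplete

# Top 20 event names seen in the last month, filtered by a fuzzy term;
# the q string is turned into an ILIKE pattern by helper.string_to_sql_like.
events = autocomplete.search_events(project_id=1, q="click")
# -> {"values": [{"value": ..., "dataCount": ...}, ...], "_src": 2}

# Values of one property, optionally narrowed to a single event. Without
# event_name, counts are summed across all events (the GROUP BY branch).
props = autocomplete.search_properties(project_id=1, property_name="url_path",
                                       event_name="LOCATION", q="checkout")
```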
180
api/chalicelib/core/product_analytics/events.py
Normal file
@@ -0,0 +1,180 @@
import logging

import schemas
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.ch_client import ClickHouseClient
from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast

logger = logging.getLogger(__name__)
PREDEFINED_EVENTS = [
    "CLICK",
    "INPUT",
    "LOCATION",
    "ERROR",
    "REQUEST"
]


def get_events(project_id: int):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT DISTINCT ON (event_name, auto_captured)
                      COUNT(1) OVER () AS total,
                      event_name AS name, display_name, description,
                      auto_captured
               FROM product_analytics.all_events
               WHERE project_id=%(project_id)s
               ORDER BY auto_captured, display_name, event_name;""",
            parameters={"project_id": project_id})
        rows = ch_client.execute(r)
        if len(rows) == 0:
            return {"total": len(PREDEFINED_EVENTS), "list": [{
                "name": e,
                "displayName": "",
                "description": "",
                "autoCaptured": True,
                "id": "event_0",
                "dataType": "string",
                "possibleTypes": [
                    "string"
                ],
                "_foundInPredefinedList": False
            } for e in PREDEFINED_EVENTS]}
        total = rows[0]["total"]
        rows = helper.list_to_camel_case(rows)
        for i, row in enumerate(rows):
            row["id"] = f"event_{i}"
            row["dataType"] = "string"
            row["possibleTypes"] = ["string"]
            row["_foundInPredefinedList"] = True
            row.pop("total")
        keys = [r["name"] for r in rows]
        for e in PREDEFINED_EVENTS:
            if e not in keys:
                total += 1
                rows.append({
                    "name": e,
                    "displayName": "",
                    "description": "",
                    "autoCaptured": True,
                    "id": "event_0",
                    "dataType": "string",
                    "possibleTypes": [
                        "string"
                    ],
                    "_foundInPredefinedList": False
                })
        return {"total": total, "list": rows}


def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
    with ClickHouseClient() as ch_client:
        full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                     "projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}

        constraints = ["project_id = %(projectId)s",
                       "created_at >= toDateTime(%(startDate)s/1000)",
                       "created_at <= toDateTime(%(endDate)s/1000)"]
        ev_constraints = []
        for i, f in enumerate(data.filters):
            if not f.is_event:
                f.value = helper.values_for_operator(value=f.value, op=f.operator)
                f_k = f"f_value{i}"
                full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
                is_any = sh.isAny_opreator(f.operator)
                is_undefined = sh.isUndefined_operator(f.operator)
                if f.is_predefined:
                    column = f.name
                else:
                    column = f"properties.{f.name}"

                if is_any:
                    condition = f"notEmpty({column})"
                elif is_undefined:
                    condition = f"empty({column})"
                else:
                    condition = sh.multi_conditions(
                        get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
                        values=f.value, value_key=f_k)
                constraints.append(condition)

            else:
                e_k = f"e_value{i}"
                full_args = {**full_args, e_k: f.name}
                condition = f"`$event_name` = %({e_k})s"
                sub_conditions = []
                for j, ef in enumerate(f.properties.filters):
                    p_k = f"e_{i}_p_{j}"
                    full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k, data_type=ef.data_type)}
                    cast = get_col_cast(data_type=ef.data_type, value=ef.value)
                    if ef.is_predefined:
                        sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(`{ef.name}`,'{cast}')",
                                                          val_name=p_k, operator=ef.operator)
                    else:
                        sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(properties.`{ef.name}`,'{cast}')",
                                                          val_name=p_k, operator=ef.operator)
                    sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
                if len(sub_conditions) > 0:
                    condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"

                ev_constraints.append(condition)

        constraints.append("(" + " OR ".join(ev_constraints) + ")")
        query = ch_client.format(
            f"""SELECT COUNT(1) OVER () AS total,
                       event_id,
                       `$event_name`,
                       created_at,
                       `distinct_id`,
                       `$browser`,
                       `$import`,
                       `$os`,
                       `$country`,
                       `$state`,
                       `$city`,
                       `$screen_height`,
                       `$screen_width`,
                       `$source`,
                       `$user_id`,
                       `$device`
                FROM product_analytics.events
                WHERE {" AND ".join(constraints)}
                ORDER BY created_at
                LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters=full_args)
        rows = ch_client.execute(query)
        if len(rows) == 0:
            return {"total": 0, "rows": [], "_src": 2}
        total = rows[0]["total"]
        for r in rows:
            r.pop("total")
        return {"total": total, "rows": rows, "_src": 2}


def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total, all_events.event_name AS name,
                      *
               FROM product_analytics.all_events
               WHERE project_id = %(project_id)s
               ORDER BY display_name
               LIMIT %(limit)s
               OFFSET %(offset)s;""",
            parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
        rows = ch_client.execute(r)
        if len(rows) == 0:
            return {"total": 0, "list": []}
        total = rows[0]["total"]
        rows = helper.list_to_camel_case(rows)
        for i, row in enumerate(rows):
            row["id"] = f"event_{i}"
            row["dataType"] = "string"
            row["possibleTypes"] = ["string"]
            row["_foundInPredefinedList"] = True
            row.pop("total")
        return {"total": total, "list": rows}
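get_events, search_events, and get_lexicon all fetch the result count with `COUNT(1) OVER () AS total`, so one round trip returns both the page and the grand total; every row then carries the same `total` value, which the Python side reads once and strips. A sketch of that post-processing pattern in isolation:

```python
# Sketch: each row duplicates the window-function total, so read it from the
# first row and drop the column from every row before returning the page.
def split_total(rows: list[dict]) -> tuple[int, list[dict]]:
    if not rows:
        return 0, []
    total = rows[0]["total"]
    for r in rows:
        r.pop("total")
    return total, rows
```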
156
api/chalicelib/core/product_analytics/filters.py
Normal file
@@ -0,0 +1,156 @@
import schemas


def get_sessions_filters(project_id: int):
    return {"total": 13,
            "list": [
                {
                    "id": "sf_1",
                    "name": schemas.FilterType.REFERRER,
                    "displayName": "Referrer",
                    "possibleTypes": [
                        "String"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_2",
                    "name": schemas.FilterType.DURATION,
                    "displayName": "Duration",
                    "possibleTypes": [
                        "int"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_3",
                    "name": schemas.FilterType.UTM_SOURCE,
                    "displayName": "UTM Source",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_4",
                    "name": schemas.FilterType.UTM_MEDIUM,
                    "displayName": "UTM Medium",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_5",
                    "name": schemas.FilterType.UTM_CAMPAIGN,
                    "displayName": "UTM Campaign",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_6",
                    "name": schemas.FilterType.USER_COUNTRY,
                    "displayName": "Country",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_7",
                    "name": schemas.FilterType.USER_CITY,
                    "displayName": "City",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_8",
                    "name": schemas.FilterType.USER_STATE,
                    "displayName": "State / Province",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_9",
                    "name": schemas.FilterType.USER_OS,
                    "displayName": "OS",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_10",
                    "name": schemas.FilterType.USER_BROWSER,
                    "displayName": "Browser",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_11",
                    "name": schemas.FilterType.USER_DEVICE,
                    "displayName": "Device",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_12",
                    "name": schemas.FilterType.PLATFORM,
                    "displayName": "Platform",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                },
                {
                    "id": "sf_13",
                    "name": schemas.FilterType.REV_ID,
                    "displayName": "Version ID",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": True
                }
            ]}


def get_users_filters(project_id: int):
    return {"total": 2,
            "list": [
                {
                    "id": "uf_1",
                    "name": schemas.FilterType.USER_ID,
                    "displayName": "User ID",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": False
                },
                {
                    "id": "uf_2",
                    "name": schemas.FilterType.USER_ANONYMOUS_ID,
                    "displayName": "User Anonymous ID",
                    "possibleTypes": [
                        "string"
                    ],
                    "autoCaptured": False
                }
            ]}


def get_global_filters(project_id: int):
    r = get_sessions_filters(project_id)
    r = r["list"]
    for f in r:
        f["defaultProperty"] = False
    return r
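The filter catalogues are static; get_global_filters reuses the session list and tags each entry. A usage sketch:

```python
from chalicelib.core.product_analytics import filters

session_filters = filters.get_sessions_filters(project_id=1)  # {"total": 13, "list": [...]}
user_filters = filters.get_users_filters(project_id=1)        # {"total": 2, "list": [...]}

# Same 13 session filters, each with "defaultProperty": False added.
for f in filters.get_global_filters(project_id=1):
    assert f["defaultProperty"] is False
```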
174
api/chalicelib/core/product_analytics/properties.py
Normal file
@@ -0,0 +1,174 @@
import schemas
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.ch_client import ClickHouseClient

PREDEFINED_PROPERTIES = {
    "label": "String",
    "hesitation_time": "UInt32",
    "name": "String",
    "payload": "String",
    "level": "Enum8",
    "source": "Enum8",
    "message": "String",
    "error_id": "String",
    "duration": "UInt16",
    "context": "Enum8",
    "url_host": "String",
    "url_path": "String",
    "url_hostpath": "String",
    "request_start": "UInt16",
    "response_start": "UInt16",
    "response_end": "UInt16",
    "dom_content_loaded_event_start": "UInt16",
    "dom_content_loaded_event_end": "UInt16",
    "load_event_start": "UInt16",
    "load_event_end": "UInt16",
    "first_paint": "UInt16",
    "first_contentful_paint_time": "UInt16",
    "speed_index": "UInt16",
    "visually_complete": "UInt16",
    "time_to_interactive": "UInt16",
    "ttfb": "UInt16",
    "ttlb": "UInt16",
    "response_time": "UInt16",
    "dom_building_time": "UInt16",
    "dom_content_loaded_event_time": "UInt16",
    "load_event_time": "UInt16",
    "min_fps": "UInt8",
    "avg_fps": "UInt8",
    "max_fps": "UInt8",
    "min_cpu": "UInt8",
    "avg_cpu": "UInt8",
    "max_cpu": "UInt8",
    "min_total_js_heap_size": "UInt64",
    "avg_total_js_heap_size": "UInt64",
    "max_total_js_heap_size": "UInt64",
    "min_used_js_heap_size": "UInt64",
    "avg_used_js_heap_size": "UInt64",
    "max_used_js_heap_size": "UInt64",
    "method": "Enum8",
    "status": "UInt16",
    "success": "UInt8",
    "request_body": "String",
    "response_body": "String",
    "transfer_size": "UInt32",
    "selector": "String",
    "normalized_x": "Float32",
    "normalized_y": "Float32",
    "message_id": "UInt64"
}

EVENT_DEFAULT_PROPERTIES = {
    "CLICK": "label",
    "INPUT": "label",
    "LOCATION": "url_path",
    "ERROR": "name",
    "REQUEST": "url_path"
}


def get_all_properties(project_id: int):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total, property_name AS name,
                      display_name,
                      array_agg(DISTINCT event_properties.value_type) AS possible_types
               FROM product_analytics.all_properties
                    LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
               WHERE all_properties.project_id = %(project_id)s
               GROUP BY property_name, display_name
               ORDER BY display_name, property_name;""",
            parameters={"project_id": project_id})
        properties = ch_client.execute(r)
        if len(properties) == 0:
            return {"total": 0, "list": []}
        total = properties[0]["total"]
        properties = helper.list_to_camel_case(properties)
        for i, p in enumerate(properties):
            p["id"] = f"prop_{i}"
            p["_foundInPredefinedList"] = False
            if p["name"] in PREDEFINED_PROPERTIES:
                p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
                p["_foundInPredefinedList"] = True
            p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
            p.pop("total")
        keys = [p["name"] for p in properties]
        for p in PREDEFINED_PROPERTIES:
            if p not in keys:
                total += 1
                properties.append({
                    "name": p,
                    "displayName": "",
                    "possibleTypes": [
                    ],
                    "id": f"prop_{len(properties) + 1}",
                    "_foundInPredefinedList": False,
                    "dataType": PREDEFINED_PROPERTIES[p]
                })
        return {"total": total, "list": properties}


def get_event_properties(project_id: int, event_name: str, auto_captured: bool):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT all_properties.property_name AS name,
                      all_properties.display_name,
                      array_agg(DISTINCT event_properties.value_type) AS possible_types
               FROM product_analytics.event_properties
                    INNER JOIN product_analytics.all_properties USING (property_name)
               WHERE event_properties.project_id = %(project_id)s
                 AND all_properties.project_id = %(project_id)s
                 AND event_properties.event_name = %(event_name)s
                 AND event_properties.auto_captured = %(auto_captured)s
               GROUP BY ALL
               ORDER BY 1;""",
            parameters={"project_id": project_id, "event_name": event_name, "auto_captured": auto_captured})
        properties = ch_client.execute(r)
        properties = helper.list_to_camel_case(properties)
        for i, p in enumerate(properties):
            p["id"] = f"prop_{i}"
            p["_foundInPredefinedList"] = False
            if p["name"] in PREDEFINED_PROPERTIES:
                p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
                p["_foundInPredefinedList"] = True
            p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
            p["defaultProperty"] = auto_captured and event_name in EVENT_DEFAULT_PROPERTIES \
                                   and p["name"] == EVENT_DEFAULT_PROPERTIES[event_name]

        return properties


def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total, all_properties.property_name AS name,
                      all_properties.*,
                      possible_types.values AS possible_types,
                      possible_values.values AS sample_values
               FROM product_analytics.all_properties
                    LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS values
                               FROM product_analytics.event_properties
                               WHERE project_id=%(project_id)s
                               GROUP BY 1, 2) AS possible_types
                              USING (project_id, property_name)
                    LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS values
                               FROM product_analytics.property_values_samples
                               WHERE project_id=%(project_id)s
                               GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
               WHERE project_id = %(project_id)s
               ORDER BY display_name
               LIMIT %(limit)s
               OFFSET %(offset)s;""",
            parameters={"project_id": project_id,
                        "limit": page.limit,
                        "offset": (page.page - 1) * page.limit})
        properties = ch_client.execute(r)
        if len(properties) == 0:
            return {"total": 0, "list": []}
        total = properties[0]["total"]
        for i, p in enumerate(properties):
            p["id"] = f"prop_{i}"
            p.pop("total")
        return {"total": total, "list": helper.list_to_camel_case(properties)}
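PREDEFINED_PROPERTIES stores raw ClickHouse types, while the API exposes simplified ones via `exp_ch_helper.simplify_clickhouse_type`, whose implementation is not part of this diff. A sketch of the kind of mapping that implies; the exact rules live in `chalicelib.utils.exp_ch_helper` and may differ:

```python
# Sketch only: a plausible simplification of ClickHouse types to UI-level
# types. Assumption, not the real implementation.
def simplify_clickhouse_type(ch_type: str) -> str:
    if ch_type.startswith(("UInt", "Int")):
        return "int"
    if ch_type.startswith("Float"):
        return "float"
    return "string"  # String, Enum8, ...
```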
@@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
-from . import sessions_pg
+from . import sessions_pg as sessions_legacy
-from . import sessions_ch
-from . import sessions_search_pg
+from . import sessions_search_pg as sessions_search_legacy

-if config("EXP_METRICS", cast=bool, default=False):
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+    logger.info(">>> Using experimental sessions search")
     from . import sessions_ch as sessions
+    from . import sessions_search_ch as sessions_search
 else:
     from . import sessions_pg as sessions
+    from . import sessions_search_pg as sessions_search

+# if config("EXP_METRICS", cast=bool, default=False):
+#     from . import sessions_ch as sessions
+# else:
+#     from . import sessions_pg as sessions
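The module now switches both the sessions and sessions-search backends on a single EXP_SESSIONS_SEARCH flag instead of the old EXP_METRICS toggle, and keeps the PostgreSQL implementations importable under the `*_legacy` aliases. The flag is read through python-decouple, so it comes from the environment or a `.env` file; a minimal sketch of the toggle:

```python
from decouple import config

# Reads EXP_SESSIONS_SEARCH from the environment or .env; strings like
# "true"/"false" are cast to bool. Callers only ever import the aliases
# (sessions, sessions_search), so the backend choice is invisible to them.
use_ch = config("EXP_SESSIONS_SEARCH", cast=bool, default=False)
print("sessions backend:", "clickhouse" if use_ch else "postgres")
```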
@@ -2,10 +2,12 @@ import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata
+from chalicelib.core import metadata
+from chalicelib.core.events import events
 from . import performance_event, sessions_legacy
 from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
 from chalicelib.utils import sql_helper as sh
+from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast

 logger = logging.getLogger(__name__)
@@ -48,8 +50,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     query = f"""SELECT gs.generate_series AS timestamp,
                        COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
                 FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
-                LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
-                                          s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
+                LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id,
+                                          isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id),
                                           s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
                                   s.datetime AS datetime
                            {query_part}) AS processed_sessions ON(TRUE)
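The rewrite swaps generic SQL null/empty checks for ClickHouse's native `isNotNull`/`notEmpty` functions; the fallback chain itself is unchanged. A rough Python equivalent of the `multiIf()` above (assuming NULL and empty string are both treated as "missing"):

```python
# Sketch: the user-id coalescing performed by multiIf() in the query.
def effective_user_id(user_id, user_anonymous_id, user_uuid):
    if user_id:                 # isNotNull(...) AND notEmpty(...)
        return user_id
    if user_anonymous_id:
        return user_anonymous_id
    return str(user_uuid)       # toString(s.user_uuid)
```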
@@ -148,12 +150,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     for e in data.events:
         if e.type == schemas.EventType.LOCATION:
             if e.operator not in extra_conditions:
-                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
+                extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
                     "type": e.type,
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": []
+                    "filters": e.filters
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
@@ -173,12 +175,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     for e in data.events:
         if e.type == schemas.EventType.REQUEST_DETAILS:
             if e.operator not in extra_conditions:
-                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
+                extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
                     "type": e.type,
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": []
+                    "filters": e.filters
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
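Both loops collapse all LOCATION (or REQUEST_DETAILS) events that share an operator into one synthetic event whose value list is the union of the originals; the change now carries the sub-filters over instead of dropping them. A standalone sketch of the merge (the final `append` is inferred, since the loop body is cut off in this hunk):

```python
# Sketch: one bucket per operator, values deduplicated across events.
extra_conditions = {}
for e in data.events:
    if e.type == schemas.EventType.LOCATION:
        if e.operator not in extra_conditions:
            extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
                "type": e.type,
                "isEvent": True,
                "value": [],
                "operator": e.operator,
                "filters": e.filters
            })
        for v in e.value:
            if v not in extra_conditions[e.operator].value:
                extra_conditions[e.operator].value.append(v)  # inferred completion
```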
@@ -238,8 +240,10 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
         main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count,
                                 {main_col} AS name,
                                 count(DISTINCT session_id) AS total,
-                                COALESCE(SUM(count(DISTINCT session_id)) OVER (), 0) AS total_count
-                         FROM (SELECT s.session_id AS session_id {extra_col}
+                                any(total_count) as total_count
+                         FROM (SELECT s.session_id AS session_id,
+                                      count(DISTINCT s.session_id) OVER () AS total_count
+                                      {extra_col}
                                {query_part}) AS filtred_sessions
                          {extra_where}
                          GROUP BY {main_col}
@@ -249,11 +253,13 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
         main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count,
                                 {main_col} AS name,
                                 count(DISTINCT user_id) AS total,
-                                COALESCE(SUM(count(DISTINCT user_id)) OVER (), 0) AS total_count
-                         FROM (SELECT s.user_id AS user_id {extra_col}
+                                any(total_count) AS total_count
+                         FROM (SELECT s.user_id AS user_id,
+                                      count(DISTINCT s.user_id) OVER () AS total_count
+                                      {extra_col}
                                {query_part}
                                WHERE isNotNull(user_id)
-                                 AND user_id != '') AS filtred_sessions
+                                 AND notEmpty(user_id)) AS filtred_sessions
                          {extra_where}
                          GROUP BY {main_col}
                          ORDER BY total DESC
@@ -277,7 +283,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     return sessions


-def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
+def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
     return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                             schemas.EventType.GRAPHQL] \
                 or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@@ -330,7 +336,11 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
             extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
             condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
         else:
-            condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
+            # condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
+            condition = get_sub_condition(
+                col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
+                val_name=value_key, operator=op
+            )

         conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
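json_condition now delegates string comparisons to `get_sub_condition`, which derives the SQL comparator from the search operator instead of interpolating a precomputed `{op}`. A sketch of the call shape, inferred from its call sites in this diff (the implementation lives in `chalicelib.utils.exp_ch_helper`; the operator value below is an assumption):

```python
# Sketch based on the call sites above: get_sub_condition(col_name, val_name,
# operator) returns a SQL fragment such as "col = %(val)s" or "col ILIKE %(val)s".
fragment = get_sub_condition(
    col_name="JSONExtractString(toString(main.`$properties`), 'url_path')",
    val_name="e_value0",
    operator=schemas.SearchEventOperator.IS)  # operator assumed for illustration
# sh.multi_conditions then ORs this fragment once per search value.
```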
@@ -373,6 +383,35 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     events_conditions_where = ["main.project_id = %(projectId)s",
                                "main.created_at >= toDateTime(%(startDate)s/1000)",
                                "main.created_at <= toDateTime(%(endDate)s/1000)"]
+    any_incident = False
+    for i, e in enumerate(data.events):
+        if e.type == schemas.EventType.INCIDENT and e.operator == schemas.SearchEventOperator.IS_ANY:
+            any_incident = True
+            data.events.pop(i)
+            # don't stop here because we could have multiple filters looking for any incident
+
+    if any_incident:
+        any_incident = False
+        for f in data.filters:
+            if f.type == schemas.FilterType.ISSUE:
+                any_incident = True
+                if schemas.IssueType.INCIDENT not in f.value:
+                    f.value.append(schemas.IssueType.INCIDENT)
+                if f.operator == schemas.SearchEventOperator.IS_ANY:
+                    f.operator = schemas.SearchEventOperator.IS
+                break
+
+        if not any_incident:
+            data.filters.append(schemas.SessionSearchFilterSchema(**{
+                "type": "issue",
+                "isEvent": False,
+                "value": [
+                    "incident"
+                ],
+                "operator": "is"
+            }))
     global_properties = []
     global_properties_negative = []
     if len(data.filters) > 0:
         meta_keys = None
         # include a sub-query of sessions inside the events query, in order to reduce the selected data
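Two caveats about the loop above: `list.pop` inside `enumerate` skips the element right after each removal, and `f.value.index(...) < 0` (as originally written) could never signal absence, since `index` raises ValueError instead of returning a negative value; a membership test is what is meant. A pop-safe sketch of the same normalization:

```python
# Sketch: strip IS_ANY incident events without mutating the list mid-iteration.
incident = [e for e in data.events
            if e.type == schemas.EventType.INCIDENT
            and e.operator == schemas.SearchEventOperator.IS_ANY]
any_incident = len(incident) > 0       # several events may ask for "any incident"
data.events = [e for e in data.events if e not in incident]
```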
@@ -391,6 +430,23 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             is_not = False
             if sh.is_negation_operator(f.operator):
                 is_not = True
+            if not f.auto_captured:
+                cast = get_col_cast(data_type=f.data_type, value=f.value)
+                if is_any:
+                    global_properties.append(f'isNotNull(e.properties.`{f.type}`)')
+                else:
+                    if is_not:
+                        op = sh.reverse_sql_operator(op)
+                        global_properties_negative.append(sh.multi_conditions(get_sub_condition(
+                            col_name=f"accurateCastOrNull(e.properties.`{f.type}`,'{cast}')",
+                            val_name=f_k, operator=op), f.value, is_not=False, value_key=f_k))
+                    else:
+                        global_properties.append(sh.multi_conditions(get_sub_condition(
+                            col_name=f"accurateCastOrNull(e.properties.`{f.type}`,'{cast}')",
+                            val_name=f_k, operator=f.operator), f.value, is_not=False, value_key=f_k))
+
+                continue

             if filter_type == schemas.FilterType.USER_BROWSER:
                 if is_any:
                     extra_constraints.append('isNotNull(s.user_browser)')
@@ -516,7 +572,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 ss_constraints.append(
                     sh.multi_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
                                         value_key=f_k))
-            elif filter_type == events.EventType.METADATA.ui_type:
+            elif filter_type == schemas.FilterType.METADATA:
+                # to support old metadata-filter structure
                 # get metadata list only if you need it
                 if meta_keys is None:
                     meta_keys = metadata.get(project_id=project_id)
@@ -537,6 +594,23 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                         sh.multi_conditions(
                             f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
                             f.value, is_not=is_not, value_key=f_k))
+            elif filter_type.startswith(schemas.FilterType.METADATA):
+                # to support new metadata-filter structure
+
+                if is_any:
+                    extra_constraints.append(f"isNotNull(s.{filter_type})")
+                    ss_constraints.append(f"isNotNull(ms.{filter_type})")
+                elif is_undefined:
+                    extra_constraints.append(f"isNull(s.{filter_type})")
+                    ss_constraints.append(f"isNull(ms.{filter_type})")
+                else:
+                    extra_constraints.append(
+                        sh.multi_conditions(f"s.{filter_type} {op} toString(%({f_k})s)",
+                                            f.value, is_not=is_not, value_key=f_k))
+                    ss_constraints.append(
+                        sh.multi_conditions(f"ms.{filter_type} {op} toString(%({f_k})s)",
+                                            f.value, is_not=is_not, value_key=f_k))
+
             elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
                 if is_any:
                     extra_constraints.append('isNotNull(s.user_id)')
@@ -620,6 +694,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         events_conditions_where.append(f"""main.session_id IN (SELECT s.session_id
                                                                FROM {MAIN_SESSIONS_TABLE} AS s
                                                                WHERE {" AND ".join(extra_constraints)})""")
+
+    if len(global_properties) > 0:
+        global_properties += ["e.project_id=%(project_id)s",
+                              "e.created_at >= toDateTime(%(startDate)s/1000)",
+                              "e.created_at <= toDateTime(%(endDate)s/1000)"]
     # ---------------------------------------------------------------------------
     events_extra_join = ""
     if len(data.events) > 0:
@@ -660,39 +739,60 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 event.value = helper.values_for_operator(value=event.value, op=event.operator)
                 full_args = {**full_args,
                              **sh.multi_values(event.value, value_key=e_k),
-                             **sh.multi_values(event.source, value_key=s_k)}
+                             **sh.multi_values(event.source, value_key=s_k),
+                             e_k: event.value[0] if len(event.value) > 0 else event.value}

-                if event_type == events.EventType.CLICK.ui_type:
+                if event_type == schemas.EventType.CLICK:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
                     if platform == "web":
-                        _column = events.EventType.CLICK.column
+                        _column = "label"
                         event_where.append(
                             f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                         events_conditions.append({"type": event_where[-1]})
                         if not is_any:
                             if schemas.ClickEventExtraOperator.has_value(event.operator):
-                                event_where.append(json_condition(
-                                    "main",
-                                    "$properties",
-                                    "selector", op, event.value, e_k)
-                                )
+                                # event_where.append(json_condition(
+                                #     "main",
+                                #     "$properties",
+                                #     "selector", op, event.value, e_k)
+                                # )
+                                event_where.append(
+                                    sh.multi_conditions(
+                                        get_sub_condition(col_name=f"main.`$properties`.selector",
+                                                          val_name=e_k, operator=event.operator),
+                                        event.value, value_key=e_k)
+                                )
                                 events_conditions[-1]["condition"] = event_where[-1]
                             else:
                                 if is_not:
-                                    event_where.append(json_condition(
-                                        "sub", "$properties", _column, op, event.value, e_k
-                                    ))
+                                    # event_where.append(json_condition(
+                                    #     "sub", "$properties", _column, op, event.value, e_k
+                                    # ))
+                                    event_where.append(
+                                        sh.multi_conditions(
+                                            get_sub_condition(col_name=f"sub.`$properties`.{_column}",
+                                                              val_name=e_k, operator=event.operator),
+                                            event.value, value_key=e_k)
+                                    )
                                     events_conditions_not.append(
                                         {
-                                            "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
+                                            "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
+                                        }
+                                    )
                                     events_conditions_not[-1]["condition"] = event_where[-1]
                                 else:
+                                    # event_where.append(
+                                    #     json_condition("main", "$properties", _column, op, event.value, e_k)
+                                    # )
                                     event_where.append(
-                                        json_condition("main", "$properties", _column, op, event.value, e_k)
+                                        sh.multi_conditions(
+                                            get_sub_condition(col_name=f"main.`$properties`.{_column}",
+                                                              val_name=e_k, operator=event.operator),
+                                            event.value, value_key=e_k)
                                     )
                                     events_conditions[-1]["condition"] = event_where[-1]
                     else:
-                        _column = events.EventType.CLICK_MOBILE.column
+                        _column = "label"
                         event_where.append(
                             f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                         events_conditions.append({"type": event_where[-1]})
@@ -711,10 +811,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                             )
                             events_conditions[-1]["condition"] = event_where[-1]

-                elif event_type == events.EventType.INPUT.ui_type:
+                elif event_type == schemas.EventType.INPUT:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
                     if platform == "web":
-                        _column = events.EventType.INPUT.column
+                        _column = "label"
                         event_where.append(
                             f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                         events_conditions.append({"type": event_where[-1]})
@@ -739,7 +839,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu

                         full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
                     else:
-                        _column = events.EventType.INPUT_MOBILE.column
+                        _column = "label"
                         event_where.append(
                             f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                         events_conditions.append({"type": event_where[-1]})
@@ -759,7 +859,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu

                         events_conditions[-1]["condition"] = event_where[-1]

-                elif event_type == events.EventType.LOCATION.ui_type:
+                elif event_type == schemas.EventType.LOCATION:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
                     if platform == "web":
                         _column = 'url_path'
@@ -781,7 +881,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                             )
                             events_conditions[-1]["condition"] = event_where[-1]
                     else:
-                        _column = events.EventType.VIEW_MOBILE.column
+                        _column = "name"
                         event_where.append(
                             f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                         events_conditions.append({"type": event_where[-1]})
@@ -798,9 +898,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                             event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
                                                                    event.value, value_key=e_k))
                             events_conditions[-1]["condition"] = event_where[-1]
-                elif event_type == events.EventType.CUSTOM.ui_type:
+                elif event_type == schemas.EventType.CUSTOM:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-                    _column = events.EventType.CUSTOM.column
+                    _column = "name"
                     event_where.append(
                         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                     events_conditions.append({"type": event_where[-1]})
@@ -818,7 +918,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                             "main", "$properties", _column, op, event.value, e_k
                         ))
                         events_conditions[-1]["condition"] = event_where[-1]
-                elif event_type == events.EventType.REQUEST.ui_type:
+                elif event_type == schemas.EventType.REQUEST:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
                     _column = 'url_path'
                     event_where.append(
@@ -839,9 +939,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                         ))
                         events_conditions[-1]["condition"] = event_where[-1]

-                elif event_type == events.EventType.STATEACTION.ui_type:
+                elif event_type == schemas.EventType.STATE_ACTION:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-                    _column = events.EventType.STATEACTION.column
+                    _column = "name"
                     event_where.append(
                         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                     events_conditions.append({"type": event_where[-1]})
@@ -860,7 +960,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                         ))
                         events_conditions[-1]["condition"] = event_where[-1]
                 # TODO: isNot for ERROR
-                elif event_type == events.EventType.ERROR.ui_type:
+                elif event_type == schemas.EventType.ERROR:
                     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
                     events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
                     event_where.append(
@@ -870,20 +970,23 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     events_conditions[-1]["condition"] = []
                     if not is_any and event.value not in [None, "*", ""]:
                         event_where.append(
-                            sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
-                                                event.value, value_key=e_k))
+                            sh.multi_conditions(
+                                f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
+                                event.value, value_key=e_k))
                         events_conditions[-1]["condition"].append(event_where[-1])
                         events_extra_join += f" AND {event_where[-1]}"
                     if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
-                        event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
+                        event_where.append(
+                            sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
+                                                value_key=s_k))
                         events_conditions[-1]["condition"].append(event_where[-1])
                         events_extra_join += f" AND {event_where[-1]}"

                     events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])

                 # ----- Mobile
-                elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-                    _column = events.EventType.CLICK_MOBILE.column
+                elif event_type == schemas.EventType.CLICK_MOBILE:
+                    _column = "label"
                     event_where.append(
                         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                     events_conditions.append({"type": event_where[-1]})
@@ -901,8 +1004,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                             "main", "$properties", _column, op, event.value, e_k
                         ))
                         events_conditions[-1]["condition"] = event_where[-1]
-                elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-                    _column = events.EventType.INPUT_MOBILE.column
+                elif event_type == schemas.EventType.INPUT_MOBILE:
+                    _column = "label"
                     event_where.append(
                         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
                     events_conditions.append({"type": event_where[-1]})
@@ -920,8 +1023,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             "main", "$properties", _column, op, event.value, e_k
         ))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-    _column = events.EventType.VIEW_MOBILE.column
+elif event_type == schemas.EventType.VIEW_MOBILE:
+    _column = "name"
     event_where.append(
         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})

@@ -939,8 +1042,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             "main", "$properties", _column, op, event.value, e_k
         ))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-    _column = events.EventType.CUSTOM_MOBILE.column
+elif event_type == schemas.EventType.CUSTOM_MOBILE:
+    _column = "name"
     event_where.append(
         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})

@@ -959,7 +1062,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         ))

         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
+elif event_type == schemas.EventType.REQUEST_MOBILE:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     _column = 'url_path'
     event_where.append(

@@ -979,8 +1082,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             "main", "$properties", _column, op, event.value, e_k
         ))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.CRASH_MOBILE.ui_type:
-    _column = events.EventType.CRASH_MOBILE.column
+elif event_type == schemas.EventType.ERROR_MOBILE:
+    _column = "name"
     event_where.append(
         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})

@@ -999,8 +1102,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             "main", "$properties", _column, op, event.value, e_k
         ))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
-    _column = events.EventType.SWIPE_MOBILE.column
+elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
+    _column = "label"
     event_where.append(
         f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})

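json_condition is also defined outside this diff. Its call sites ("main", "$properties", _column, op, event.value, e_k — with two extra booleans in the status-code case) suggest it renders an OR group against one key of the JSON `$properties` column. A hedged reconstruction, with the trailing booleans guessed to control numeric handling:

    # Hypothetical reconstruction based only on the call sites in this diff.
    def json_condition(table: str, column: str, key: str, op: str, values: list,
                       value_key: str, is_number: bool = False, cast: bool = False) -> str:
        accessor = f"{table}.`{column}`.{key}"
        if not is_number:
            # Text comparison: coerce the JSON value to a string first.
            accessor = f"toString({accessor})"
        return "(" + " OR ".join(f"{accessor} {op} %({value_key}_{i})s"
                                 for i in range(len(values))) + ")"
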
@@ -1108,8 +1211,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     is_any = sh.isAny_opreator(f.operator)
     if is_any or len(f.value) == 0:
         continue
+    is_negative_operator = sh.is_negation_operator(f.operator)
     f.value = helper.values_for_operator(value=f.value, op=f.operator)
     op = sh.get_sql_operator(f.operator)
+    r_op = ""
+    if is_negative_operator:
+        r_op = sh.reverse_sql_operator(op)
     e_k_f = e_k + f"_fetch{j}"
     full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
     if f.type == schemas.FetchFilterType.FETCH_URL:

@@ -1118,6 +1225,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         ))
         events_conditions[-1]["condition"].append(event_where[-1])
         apply = True
+        if is_negative_operator:
+            events_conditions_not.append(
+                {
+                    "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
+            events_conditions_not[-1]["condition"] = sh.multi_conditions(
+                f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
     elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
         event_where.append(json_condition(
             "main", "$properties", 'status', op, f.value, e_k_f, True, True

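The events_conditions_not entries collected here pair a sub.`$event_name` type guard with the operator-reversed predicate. They only pay off later, outside this hunk, where they are presumably assembled into a session-level exclusion: a session is dropped if any matching sub event satisfies the reversed condition. The shape is roughly (illustrative only, not the actual assembly code):

    # Illustrative assembly of the negated conditions into a NOT EXISTS exclusion.
    not_blocks = [f"({c['type']} AND {c['condition']})" for c in events_conditions_not]
    exclusion = (f"NOT EXISTS (SELECT 1 FROM {MAIN_EVENTS_TABLE} AS sub "
                 f"WHERE sub.session_id = main.session_id "
                 f"AND ({' OR '.join(not_blocks)}))")
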
@@ -1130,6 +1243,13 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         ))
         events_conditions[-1]["condition"].append(event_where[-1])
         apply = True
+        if is_negative_operator:
+            events_conditions_not.append(
+                {
+                    "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
+            events_conditions_not[-1]["condition"] = sh.multi_conditions(
+                f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
+                value_key=e_k_f)
     elif f.type == schemas.FetchFilterType.FETCH_DURATION:
         event_where.append(
             sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,

@@ -1142,12 +1262,26 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         ))
         events_conditions[-1]["condition"].append(event_where[-1])
         apply = True
+        if is_negative_operator:
+            events_conditions_not.append(
+                {
+                    "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
+            events_conditions_not[-1]["condition"] = sh.multi_conditions(
+                f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
+                value_key=e_k_f)
     elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
         event_where.append(json_condition(
             "main", "$properties", 'response_body', op, f.value, e_k_f
         ))
         events_conditions[-1]["condition"].append(event_where[-1])
         apply = True
+        if is_negative_operator:
+            events_conditions_not.append(
+                {
+                    "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
+            events_conditions_not[-1]["condition"] = sh.multi_conditions(
+                f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
+                value_key=e_k_f)
     else:
         logging.warning(f"undefined FETCH filter: {f.type}")
 if not apply:

@@ -1170,7 +1304,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
     if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
         event_where.append(json_condition(
-            "main", "$properties", events.EventType.GRAPHQL.column, op, f.value, e_k_f
+            "main", "$properties", "name", op, f.value, e_k_f
         ))
         events_conditions[-1]["condition"].append(event_where[-1])
     elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:

@@ -1191,8 +1325,92 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     else:
         logging.warning(f"undefined GRAPHQL filter: {f.type}")
     events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
+elif event_type == schemas.EventType.EVENT:
+    event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
+    _column = "label"
+    event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
+    events_conditions.append({"type": event_where[-1], "condition": ""})
+elif event_type == schemas.EventType.INCIDENT:
+    event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
+    _column = "label"
+    event_where.append(
+        f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
+    events_conditions.append({"type": event_where[-1]})
+
+    if is_not:
+        event_where.append(
+            sh.multi_conditions(
+                get_sub_condition(col_name=f"sub.`$properties`.{_column}",
+                                  val_name=e_k, operator=event.operator),
+                event.value, value_key=e_k)
+        )
+        events_conditions_not.append(
+            {
+                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
+            }
+        )
+        events_conditions_not[-1]["condition"] = event_where[-1]
+    else:
+
+        event_where.append(
+            sh.multi_conditions(
+                get_sub_condition(col_name=f"main.`$properties`.{_column}",
+                                  val_name=e_k, operator=event.operator),
+                event.value, value_key=e_k)
+        )
+        events_conditions[-1]["condition"] = event_where[-1]
+elif event_type == schemas.EventType.CLICK_COORDINATES:
+    event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
+    event_where.append(
+        f"main.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'")
+    events_conditions.append({"type": event_where[-1]})
+
+    if is_not:
+        event_where.append(
+            sh.coordinate_conditions(
+                condition_x=f"sub.`$properties`.normalized_x",
+                condition_y=f"sub.`$properties`.normalized_y",
+                values=event.value, value_key=e_k, is_not=True)
+        )
+        events_conditions_not.append(
+            {
+                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'"
+            }
+        )
+        events_conditions_not[-1]["condition"] = event_where[-1]
+    else:
+        event_where.append(
+            sh.coordinate_conditions(
+                condition_x=f"main.`$properties`.normalized_x",
+                condition_y=f"main.`$properties`.normalized_y",
+                values=event.value, value_key=e_k, is_not=True)
+        )
+        events_conditions[-1]["condition"] = event_where[-1]
+
 else:
     continue
+if event.properties is not None and len(event.properties.filters) > 0:
+    sub_conditions = []
+    for l, property in enumerate(event.properties.filters):
+        a_k = f"{e_k}_att_{l}"
+        full_args = {**full_args,
+                     **sh.multi_values(property.value, value_key=a_k, data_type=property.data_type)}
+        cast = get_col_cast(data_type=property.data_type, value=property.value)
+        if property.is_predefined:
+            condition = get_sub_condition(col_name=f"accurateCastOrNull(main.`{property.name}`,'{cast}')",
+                                          val_name=a_k, operator=property.operator)
+        else:
+            condition = get_sub_condition(
+                col_name=f"accurateCastOrNull(main.properties.`{property.name}`,'{cast}')",
+                val_name=a_k, operator=property.operator)
+        event_where.append(
+            sh.multi_conditions(condition, property.value, value_key=a_k)
+        )
+        sub_conditions.append(event_where[-1])
+    if len(sub_conditions) > 0:
+        sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
+        events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
+        events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
 if event_index == 0 or or_events:
     event_where += ss_constraints
     if is_not:

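Two remarks on this large added block. First, sh.coordinate_conditions presumably emits a range check on the normalized click coordinates; note that the positive (else) branch also passes is_not=True, which is worth double-checking against the helper's semantics. Second, the property filters route both predefined columns and free-form properties keys through ClickHouse's accurateCastOrNull before comparing, so type mismatches become NULLs rather than query errors. A hedged sketch of the two casting helpers as their usage implies (both are defined elsewhere and may differ):

    # Hypothetical sketches inferred from the call sites; not the actual implementations.
    def get_col_cast(data_type, value) -> str:
        # Maps the filter's declared data type to a ClickHouse type name.
        return {"number": "Float64", "boolean": "Bool"}.get(data_type, "String")

    def get_sub_condition(col_name: str, val_name: str, operator) -> str:
        # e.g. "accurateCastOrNull(main.`user_id`,'String') ILIKE %(e_0_att_0)s"
        op = sh.get_sql_operator(operator)
        return f"{col_name} {op} %({val_name})s"
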
@@ -1395,17 +1613,30 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
 if extra_conditions and len(extra_conditions) > 0:
     _extra_or_condition = []
     for i, c in enumerate(extra_conditions):
-        if sh.isAny_opreator(c.operator):
+        if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
             continue
         e_k = f"ec_value{i}"
         op = sh.get_sql_operator(c.operator)
         c.value = helper.values_for_operator(value=c.value, op=c.operator)
         full_args = {**full_args,
                      **sh.multi_values(c.value, value_key=e_k)}
-        if c.type == events.EventType.LOCATION.ui_type:
+        if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
             _extra_or_condition.append(
                 sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
                                     c.value, value_key=e_k))
+        elif c.type == schemas.EventType.REQUEST_DETAILS.value:
+            for j, c_f in enumerate(c.filters):
+                if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
+                    continue
+                e_k += f"_{j}"
+                op = sh.get_sql_operator(c_f.operator)
+                c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
+                full_args = {**full_args,
+                             **sh.multi_values(c_f.value, value_key=e_k)}
+                if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
+                    _extra_or_condition.append(
+                        sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
+                                            c_f.value, value_key=e_k))
         else:
             logging.warning(f"unsupported extra_event type:${c.type}")
 if len(_extra_or_condition) > 0:

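After this loop, _extra_or_condition is presumably OR-joined into a single predicate on the extra_event join, so any one of the location/request/fetch-URL conditions qualifies a session. A sketch of that final step (assumed; the merge itself happens outside this hunk):

    # Illustrative merge of the per-type extra-event conditions.
    if len(_extra_or_condition) > 0:
        extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
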
@@ -1422,6 +1653,18 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                        ORDER BY _timestamp DESC) AS s ON(s.session_id=f.session_id)"""
 else:
     deduplication_keys = ["session_id"] + extra_deduplication
+    if len(global_properties) > 0:
+        extra_join += f""" INNER JOIN (SELECT DISTINCT session_id
+                                       FROM {MAIN_EVENTS_TABLE} AS e
+                                       WHERE {" AND ".join(global_properties)}) AS global_filters USING(session_id)"""
+    if len(global_properties_negative) > 0:
+        extra_join += f""" LEFT JOIN (SELECT DISTINCT session_id
+                                      FROM {MAIN_EVENTS_TABLE} AS e
+                                      WHERE project_id=%(project_id)s
+                                        AND created_at >= toDateTime(%(startDate)s/1000)
+                                        AND created_at <= toDateTime(%(endDate)s/1000)
+                                        AND ({" OR ".join(global_properties_negative)})) AS negative_global_filters USING(session_id)"""
+        extra_constraints.append("isNull(negative_global_filters.session_id)")
     extra_join = f"""(SELECT *
                      FROM {MAIN_SESSIONS_TABLE} AS s {extra_join} {extra_event}
                      WHERE {" AND ".join(extra_constraints)}

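The two joins added here implement include/exclude semantics at the session level: the INNER JOIN keeps only sessions with at least one event matching the positive global properties, while the LEFT JOIN plus the isNull(...) constraint is the classic anti-join that drops sessions with any matching negative event. Stripped of parameters, the generated SQL has roughly this shape (illustrative; the angle-bracket parts stand for the rendered condition lists):

    # Illustrative shape of the generated joins (conditions elided).
    example = f"""
    INNER JOIN (SELECT DISTINCT session_id
                FROM {MAIN_EVENTS_TABLE} AS e
                WHERE <positive property conditions>) AS global_filters USING(session_id)
    LEFT JOIN (SELECT DISTINCT session_id
               FROM {MAIN_EVENTS_TABLE} AS e
               WHERE <time window AND negative property conditions>) AS negative_global_filters USING(session_id)
    """
    # ...and the outer WHERE then requires isNull(negative_global_filters.session_id).
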
@@ -1477,18 +1720,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
 def get_session_user(project_id, user_id):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """\
-            SELECT
-                user_id,
-                count(*) as session_count,
-                max(start_ts) as last_seen,
-                min(start_ts) as first_seen
-            FROM
-                "public".sessions
-            WHERE
-                project_id = %(project_id)s
-                AND user_id = %(userId)s
-                AND duration is not null
+            """ \
+            SELECT user_id,
+                   count(*) as session_count,
+                   max(start_ts) as last_seen,
+                   min(start_ts) as first_seen
+            FROM "public".sessions
+            WHERE project_id = %(project_id)s
+              AND user_id = %(userId)s
+              AND duration is not null
             GROUP BY user_id;
             """,
             {"project_id": project_id, "userId": user_id}

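The reflow of get_session_user changes only the literal SQL string; behavior is identical. For orientation, a minimal call looks like this (hypothetical values; the return shape follows the SELECT list and the module's dict-style cursor):

    # Hypothetical usage of get_session_user.
    stats = get_session_user(project_id=1, user_id="jane@example.com")
    # stats -> {"user_id": ..., "session_count": ..., "last_seen": ..., "first_seen": ...}
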
@@ -1,9 +1,9 @@
 import ast
 import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata, projects
+from chalicelib.core import metadata, projects
+from chalicelib.core.events import events
 from chalicelib.core.sessions import performance_event, sessions_favorite, sessions_legacy
 from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper
 from chalicelib.utils import sql_helper as sh

@@ -219,7 +219,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     }


-def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
+def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
     return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                             schemas.EventType.GRAPHQL] \
                 or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,

@@ -411,7 +411,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         ss_constraints.append(
             _multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
                                  value_key=f_k))
-    elif filter_type == events.EventType.METADATA.ui_type:
+    elif filter_type == schemas.FilterType.METADATA:
         # get metadata list only if you need it
         if meta_keys is None:
             meta_keys = metadata.get(project_id=project_id)

@@ -557,10 +557,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                  **_multiple_values(event.value, value_key=e_k),
                  **_multiple_values(event.source, value_key=s_k)}

-if event_type == events.EventType.CLICK.ui_type:
+if event_type == schemas.EventType.CLICK:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     if platform == "web":
-        _column = events.EventType.CLICK.column
+        _column = "label"
         event_where.append(
             f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
         events_conditions.append({"type": event_where[-1]})

@@ -582,7 +582,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
     else:
-        _column = events.EventType.CLICK_MOBILE.column
+        _column = "label"
         event_where.append(
             f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
         events_conditions.append({"type": event_where[-1]})

@@ -599,10 +599,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]

-elif event_type == events.EventType.INPUT.ui_type:
+elif event_type == schemas.EventType.INPUT:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     if platform == "web":
-        _column = events.EventType.INPUT.column
+        _column = "label"
         event_where.append(
             f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
         events_conditions.append({"type": event_where[-1]})

|
@ -623,7 +623,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
value_key=f"custom{i}"))
|
||||
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
|
||||
else:
|
||||
_column = events.EventType.INPUT_MOBILE.column
|
||||
_column = "label"
|
||||
event_where.append(
|
||||
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@@ -640,7 +640,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]

-elif event_type == events.EventType.LOCATION.ui_type:
+elif event_type == schemas.EventType.LOCATION:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     if platform == "web":
         _column = 'url_path'

@@ -660,7 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
     else:
-        _column = events.EventType.VIEW_MOBILE.column
+        _column = "name"
         event_where.append(
             f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
         events_conditions.append({"type": event_where[-1]})

@@ -676,9 +676,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
                                                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.CUSTOM.ui_type:
+elif event_type == schemas.EventType.CUSTOM:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-    _column = events.EventType.CUSTOM.column
+    _column = "name"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -692,7 +692,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.REQUEST.ui_type:
+elif event_type == schemas.EventType.REQUEST:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     _column = 'url_path'
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")

@@ -709,9 +709,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]

-elif event_type == events.EventType.STATEACTION.ui_type:
+elif event_type == schemas.EventType.STATE_ACTION:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
-    _column = events.EventType.STATEACTION.column
+    _column = "name"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -726,7 +726,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
 # TODO: isNot for ERROR
-elif event_type == events.EventType.ERROR.ui_type:
+elif event_type == schemas.EventType.ERROR:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
     events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")

@@ -747,8 +747,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])

 # ----- Mobile
-elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-    _column = events.EventType.CLICK_MOBILE.column
+elif event_type == schemas.EventType.CLICK_MOBILE:
+    _column = "label"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -762,8 +762,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-    _column = events.EventType.INPUT_MOBILE.column
+elif event_type == schemas.EventType.INPUT_MOBILE:
+    _column = "label"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -777,8 +777,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-    _column = events.EventType.VIEW_MOBILE.column
+elif event_type == schemas.EventType.VIEW_MOBILE:
+    _column = "name"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -792,8 +792,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
                                                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-    _column = events.EventType.CUSTOM_MOBILE.column
+elif event_type == schemas.EventType.CUSTOM_MOBILE:
+    _column = "name"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -807,7 +807,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
                                                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
+elif event_type == schemas.EventType.REQUEST_MOBILE:
     event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
     _column = 'url_path'
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")

@@ -823,8 +823,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                 value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.CRASH_MOBILE.ui_type:
-    _column = events.EventType.CRASH_MOBILE.column
+elif event_type == schemas.EventType.ERROR_MOBILE:
+    _column = "name"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -838,8 +838,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
                                                 event.value, value_key=e_k))
         events_conditions[-1]["condition"] = event_where[-1]
-elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
-    _column = events.EventType.SWIPE_MOBILE.column
+elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
+    _column = "label"
     event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
     events_conditions.append({"type": event_where[-1]})
     if not is_any:

@@ -993,7 +993,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
     if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
         event_where.append(
-            _multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
+            _multiple_conditions(f"main.name {op} %({e_k_f})s", f.value,
                                  value_key=e_k_f))
         events_conditions[-1]["condition"].append(event_where[-1])
     elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:

@@ -1222,7 +1222,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
         c.value = helper.values_for_operator(value=c.value, op=c.operator)
         full_args = {**full_args,
                      **_multiple_values(c.value, value_key=e_k)}
-        if c.type == events.EventType.LOCATION.ui_type:
+        if c.type == schemas.EventType.LOCATION:
             _extra_or_condition.append(
                 _multiple_conditions(f"extra_event.url_path {op} %({e_k})s",
                                      c.value, value_key=e_k))

@@ -1359,18 +1359,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
 def get_session_user(project_id, user_id):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """\
-            SELECT
-                user_id,
-                count(*) as session_count,
-                max(start_ts) as last_seen,
-                min(start_ts) as first_seen
-            FROM
-                "public".sessions
-            WHERE
-                project_id = %(project_id)s
-                AND user_id = %(userId)s
-                AND duration is not null
+            """ \
+            SELECT user_id,
+                   count(*) as session_count,
+                   max(start_ts) as last_seen,
+                   min(start_ts) as first_seen
+            FROM "public".sessions
+            WHERE project_id = %(project_id)s
+              AND user_id = %(userId)s
+              AND duration is not null
             GROUP BY user_id;
             """,
            {"project_id": project_id, "userId": user_id}

@@ -1,269 +0,0 @@ (entire file removed)
import logging
from urllib.parse import urljoin

from decouple import config

import schemas
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC

logger = logging.getLogger(__name__)


def get_note(tenant_id, project_id, user_id, note_id, share=None):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
                                {",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
                                FROM sessions_notes INNER JOIN users USING (user_id)
                                WHERE sessions_notes.project_id = %(project_id)s
                                  AND sessions_notes.note_id = %(note_id)s
                                  AND sessions_notes.deleted_at IS NULL
                                  AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""",
                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
                             "note_id": note_id, "share": share})

        cur.execute(query=query)
        row = cur.fetchone()
        row = helper.dict_to_camel_case(row)
        if row:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
            row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
    return row


def get_session_notes(tenant_id, project_id, session_id, user_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
                                FROM sessions_notes INNER JOIN users USING (user_id)
                                WHERE sessions_notes.project_id = %(project_id)s
                                  AND sessions_notes.deleted_at IS NULL
                                  AND sessions_notes.session_id = %(session_id)s
                                  AND (sessions_notes.user_id = %(user_id)s
                                       OR sessions_notes.is_public)
                                ORDER BY created_at DESC;""",
                            {"project_id": project_id, "user_id": user_id,
                             "tenant_id": tenant_id, "session_id": session_id})

        cur.execute(query=query)
        rows = cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        for row in rows:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
    return rows


def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
    with pg_client.PostgresClient() as cur:
        # base conditions
        conditions = [
            "sessions_notes.project_id = %(project_id)s",
            "sessions_notes.deleted_at IS NULL"
        ]
        params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}

        # tag conditions
        if data.tags:
            tag_key = "tag_value"
            conditions.append(
                sh.multi_conditions(f"%({tag_key})s = sessions_notes.tag", data.tags, value_key=tag_key)
            )
            params.update(sh.multi_values(data.tags, value_key=tag_key))

        # filter by ownership or shared status
        if data.shared_only:
            conditions.append("sessions_notes.is_public IS TRUE")
        elif data.mine_only:
            conditions.append("sessions_notes.user_id = %(user_id)s")
        else:
            conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")

        # search condition
        if data.search:
            conditions.append("sessions_notes.message ILIKE %(search)s")
            params["search"] = f"%{data.search}%"

        query = f"""
            SELECT
                COUNT(1) OVER () AS full_count,
                sessions_notes.*,
                users.name AS user_name
            FROM
                sessions_notes
            INNER JOIN
                users USING (user_id)
            WHERE
                {" AND ".join(conditions)}
            ORDER BY
                created_at {data.order}
            LIMIT
                %(limit)s OFFSET %(offset)s;
        """
        params.update({
            "limit": data.limit,
            "offset": data.limit * (data.page - 1)
        })

        query = cur.mogrify(query, params)
        logger.debug(query)
        cur.execute(query)
        rows = cur.fetchall()

        result = {"count": 0, "notes": helper.list_to_camel_case(rows)}
        if rows:
            result["count"] = rows[0]["fullCount"]
            for row in rows:
                row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
                row.pop("fullCount")

    return result


def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
                                VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
                            {"user_id": user_id, "project_id": project_id, "session_id": session_id,
                             **data.model_dump()})
        cur.execute(query)
        result = helper.dict_to_camel_case(cur.fetchone())
        if result:
            result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
    return result


def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
    sub_query = []
    if data.message is not None:
        sub_query.append("message = %(message)s")
    if data.tag is not None and len(data.tag) > 0:
        sub_query.append("tag = %(tag)s")
    if data.is_public is not None:
        sub_query.append("is_public = %(is_public)s")
    if data.timestamp is not None:
        sub_query.append("timestamp = %(timestamp)s")

    sub_query.append("updated_at = timezone('utc'::text, now())")
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""UPDATE public.sessions_notes
                            SET
                                {" ,".join(sub_query)}
                            WHERE
                                project_id = %(project_id)s
                                AND user_id = %(user_id)s
                                AND note_id = %(note_id)s
                                AND deleted_at ISNULL
                            RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
                        {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
        )
        row = helper.dict_to_camel_case(cur.fetchone())
        if row:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
            return row
    return {"errors": ["Note not found"]}


def delete(project_id, note_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""UPDATE public.sessions_notes
                           SET deleted_at = timezone('utc'::text, now())
                           WHERE note_id = %(note_id)s
                             AND project_id = %(project_id)s
                             AND deleted_at ISNULL;""",
                        {"project_id": project_id, "note_id": note_id})
        )
    return {"data": {"state": "success"}}


def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
    note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
    if note is None:
        return {"errors": ["Note not found"]}
    session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
    if note["timestamp"] > 0:
        session_url += f"&jumpto={note['timestamp']}"
    title = f"<{session_url}|Note for session {note['sessionId']}>"

    blocks = [{"type": "section",
               "fields": [{"type": "mrkdwn",
                           "text": title}]},
              {"type": "section",
               "fields": [{"type": "plain_text",
                           "text": note["message"]}]}]
    if note["tag"]:
        blocks.append({"type": "context",
                       "elements": [{"type": "plain_text",
                                     "text": f"Tag: *{note['tag']}*"}]})
    bottom = f"Created by {note['userName'].capitalize()}"
    if user_id != note["userId"]:
        bottom += f"\nSent by {note['shareName']}: "
    blocks.append({"type": "context",
                   "elements": [{"type": "plain_text",
                                 "text": bottom}]})
    return Slack.send_raw(
        tenant_id=tenant_id,
        webhook_id=webhook_id,
        body={"blocks": blocks}
    )


def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
    note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
    if note is None:
        return {"errors": ["Note not found"]}
    session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
    if note["timestamp"] > 0:
        session_url += f"&jumpto={note['timestamp']}"
    title = f"[Note for session {note['sessionId']}]({session_url})"

    blocks = [{
        "type": "TextBlock",
        "text": title,
        "style": "heading",
        "size": "Large"
    },
        {
            "type": "TextBlock",
            "spacing": "Small",
            "text": note["message"]
        }
    ]
    if note["tag"]:
        blocks.append({"type": "TextBlock",
                       "spacing": "Small",
                       "text": f"Tag: *{note['tag']}*",
                       "size": "Small"})
    bottom = f"Created by {note['userName'].capitalize()}"
    if user_id != note["userId"]:
        bottom += f"\nSent by {note['shareName']}: "
    blocks.append({"type": "TextBlock",
                   "spacing": "Default",
                   "text": bottom,
                   "size": "Small",
                   "fontType": "Monospace"})
    return MSTeams.send_raw(
        tenant_id=tenant_id,
        webhook_id=webhook_id,
        body={"type": "message",
              "attachments": [
                  {"contentType": "application/vnd.microsoft.card.adaptive",
                   "contentUrl": None,
                   "content": {
                       "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                       "type": "AdaptiveCard",
                       "version": "1.5",
                       "body": [{
                           "type": "ColumnSet",
                           "style": "emphasis",
                           "separator": True,
                           "bleed": True,
                           "columns": [{"width": "stretch",
                                        "items": blocks,
                                        "type": "Column"}]
                       }]}}
              ]})

@@ -2,7 +2,8 @@ import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata
+from chalicelib.core.events import events
+from chalicelib.core import metadata
 from . import performance_event
 from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh

@@ -143,7 +144,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     for e in data.events:
         if e.type == schemas.EventType.LOCATION:
             if e.operator not in extra_conditions:
-                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
+                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                     "type": e.type,
                     "isEvent": True,
                     "value": [],

@@ -160,7 +161,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     for e in data.events:
         if e.type == schemas.EventType.REQUEST_DETAILS:
             if e.operator not in extra_conditions:
-                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
+                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                     "type": e.type,
                     "isEvent": True,
                     "value": [],

@@ -273,7 +274,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
     return sessions


-def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
+def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
     return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                             schemas.EventType.GRAPHQL] \
                 or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,

@@ -439,7 +440,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
         extra_constraints.append(
             sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
                                 value_key=f_k))
-    elif filter_type == events.EventType.METADATA.ui_type:
+    elif filter_type == schemas.FilterType.METADATA:
         # get metadata list only if you need it
         if meta_keys is None:
             meta_keys = metadata.get(project_id=project_id)

@@ -580,36 +581,36 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                  **sh.multi_values(event.value, value_key=e_k),
                  **sh.multi_values(event.source, value_key=s_k)}

-if event_type == events.EventType.CLICK.ui_type:
+if event_type == schemas.EventType.CLICK:
     if platform == "web":
-        event_from = event_from % f"{events.EventType.CLICK.table} AS main "
+        event_from = event_from % f"events.clicks AS main "
         if not is_any:
             if schemas.ClickEventExtraOperator.has_value(event.operator):
                 event_where.append(
                     sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
             else:
                 event_where.append(
-                    sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
+                    sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
                                         value_key=e_k))
     else:
-        event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
+        event_from = event_from % f"events_ios.taps AS main "
         if not is_any:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
+                sh.multi_conditions(f"main.label {op} %({e_k})s",
                                     event.value,
                                     value_key=e_k))

-elif event_type == events.EventType.TAG.ui_type:
-    event_from = event_from % f"{events.EventType.TAG.table} AS main "
+elif event_type == schemas.EventType.TAG:
+    event_from = event_from % f"events.tags AS main "
     if not is_any:
         event_where.append(
             sh.multi_conditions(f"main.tag_id = %({e_k})s", event.value, value_key=e_k))
-elif event_type == events.EventType.INPUT.ui_type:
+elif event_type == schemas.EventType.INPUT:
     if platform == "web":
-        event_from = event_from % f"{events.EventType.INPUT.table} AS main "
+        event_from = event_from % f"events.inputs AS main "
         if not is_any:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
+                sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
                                     value_key=e_k))
         if event.source is not None and len(event.source) > 0:
             event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,

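In the web branch above, selector-style operators are routed to main.selector while everything else compares main.label. ClickEventExtraOperator.has_value is presumably the usual "is this raw string a member of the enum" helper; a sketch of that idiom (the member list here is invented for illustration):

    from enum import Enum

    class ClickEventExtraOperator(str, Enum):
        # Hypothetical member; the real schema defines the actual operators.
        SELECTOR_IS = "selectorIs"

        @classmethod
        def has_value(cls, value) -> bool:
            # True when the raw operator string belongs to this enum.
            return value in {item.value for item in cls}
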
@@ -617,53 +618,53 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
             full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}

     else:
-        event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
+        event_from = event_from % f"events_ios.inputs AS main "
         if not is_any:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
+                sh.multi_conditions(f"main.label {op} %({e_k})s",
                                     event.value,
                                     value_key=e_k))


-elif event_type == events.EventType.LOCATION.ui_type:
+elif event_type == schemas.EventType.LOCATION:
     if platform == "web":
-        event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
+        event_from = event_from % f"events.pages AS main "
         if not is_any:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
+                sh.multi_conditions(f"main.path {op} %({e_k})s",
                                     event.value, value_key=e_k))
     else:
-        event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
+        event_from = event_from % f"events_ios.views AS main "
         if not is_any:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
+                sh.multi_conditions(f"main.name {op} %({e_k})s",
                                     event.value, value_key=e_k))
-elif event_type == events.EventType.CUSTOM.ui_type:
-    event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
+elif event_type == schemas.EventType.CUSTOM:
+    event_from = event_from % f"events_common.customs AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
+            sh.multi_conditions(f"main.name {op} %({e_k})s", event.value,
                                 value_key=e_k))
-elif event_type == events.EventType.REQUEST.ui_type:
-    event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
+elif event_type == schemas.EventType.REQUEST:
+    event_from = event_from % f"events_common.requests AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
+            sh.multi_conditions(f"main.path {op} %({e_k})s", event.value,
                                 value_key=e_k))
-# elif event_type == events.event_type.GRAPHQL.ui_type:
+# elif event_type == schemas.event_type.GRAPHQL:
 #     event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
 #     if not is_any:
 #         event_where.append(
 #             _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
 #                                  value_key=e_k))
-elif event_type == events.EventType.STATEACTION.ui_type:
-    event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
+elif event_type == schemas.EventType.STATE_ACTION:
+    event_from = event_from % f"events.state_actions AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.name {op} %({e_k})s",
                                 event.value, value_key=e_k))
-elif event_type == events.EventType.ERROR.ui_type:
-    event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
+elif event_type == schemas.EventType.ERROR:
+    event_from = event_from % f"events.errors AS main INNER JOIN public.errors AS main1 USING(error_id)"
     event.source = list(set(event.source))
     if not is_any and event.value not in [None, "*", ""]:
         event_where.append(

@@ -674,59 +675,59 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,


 # ----- Mobile
-elif event_type == events.EventType.CLICK_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
+elif event_type == schemas.EventType.CLICK_MOBILE:
+    event_from = event_from % f"events_ios.taps AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.label {op} %({e_k})s",
                                 event.value, value_key=e_k))

-elif event_type == events.EventType.INPUT_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
+elif event_type == schemas.EventType.INPUT_MOBILE:
+    event_from = event_from % f"events_ios.inputs AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.label {op} %({e_k})s",
                                 event.value, value_key=e_k))
     if event.source is not None and len(event.source) > 0:
         event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
                                                value_key="custom{i}"))
         full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
-elif event_type == events.EventType.VIEW_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
+elif event_type == schemas.EventType.VIEW_MOBILE:
+    event_from = event_from % f"events_ios.views AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.name {op} %({e_k})s",
                                 event.value, value_key=e_k))
-elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.CUSTOM_MOBILE.table} AS main "
+elif event_type == schemas.EventType.CUSTOM_MOBILE:
+    event_from = event_from % f"events_common.customs AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.CUSTOM_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.name {op} %({e_k})s",
                                 event.value, value_key=e_k))
-elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.REQUEST_MOBILE.table} AS main "
+elif event_type == schemas.EventType.REQUEST_MOBILE:
+    event_from = event_from % f"events_common.requests AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.REQUEST_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.path {op} %({e_k})s",
                                 event.value, value_key=e_k))
-elif event_type == events.EventType.CRASH_MOBILE.ui_type:
-    event_from = event_from % f"{events.EventType.CRASH_MOBILE.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
+elif event_type == schemas.EventType.ERROR_MOBILE:
+    event_from = event_from % f"events_common.crashes AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
     if not is_any and event.value not in [None, "*", ""]:
         event_where.append(
             sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
                                 event.value, value_key=e_k))
-elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
-    event_from = event_from % f"{events.EventType.SWIPE_MOBILE.table} AS main "
+elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
+    event_from = event_from % f"events_ios.swipes AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.SWIPE_MOBILE.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.label {op} %({e_k})s",
                                 event.value, value_key=e_k))

 elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
-    event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
+    event_from = event_from % f"events_common.requests AS main "
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.path {op} %({e_k})s",
                                 event.value, value_key=e_k))
     col = performance_event.get_col(event_type)
     colname = col["column"]

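performance_event.get_col(event_type) is consulted for FETCH_FAILED and the LOCATION_* performance events, and only col["column"] is read afterwards, so it plausibly maps a performance event type onto the column that stores the corresponding metric. A hedged sketch (the column names here are invented for illustration; the real mapping lives in performance_event):

    # Hypothetical mapping; the real table/column names live in performance_event.
    def get_col(perf_event_type):
        cols = {
            schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: {"column": "dom_building_time"},
            schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: {"column": "avg_cpu"},
        }
        return cols[perf_event_type]
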
@@ -751,7 +752,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
                     schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
                     ]:
-    event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
+    event_from = event_from % f"events.pages AS main "
     col = performance_event.get_col(event_type)
     colname = col["column"]
     tname = "main"

@@ -762,7 +763,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                    f"{tname}.timestamp <= %(endDate)s"]
     if not is_any:
         event_where.append(
-            sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
+            sh.multi_conditions(f"main.path {op} %({e_k})s",
                                 event.value, value_key=e_k))
     e_k += "_custom"
     full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}

@@ -772,7 +773,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                                 event.source, value_key=e_k))

 elif event_type == schemas.EventType.REQUEST_DETAILS:
-    event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
+    event_from = event_from % f"events_common.requests AS main "
     apply = False
     for j, f in enumerate(event.filters):
         is_any = sh.isAny_opreator(f.operator)

@@ -784,7 +785,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
         full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
         if f.type == schemas.FetchFilterType.FETCH_URL:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
+                sh.multi_conditions(f"main.path {op} %({e_k_f})s::text",
                                     f.value, value_key=e_k_f))
             apply = True
         elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:

@@ -816,7 +817,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
     if not apply:
         continue
 elif event_type == schemas.EventType.GRAPHQL:
-    event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
+    event_from = event_from % f"events.graphql AS main "
     for j, f in enumerate(event.filters):
         is_any = sh.isAny_opreator(f.operator)
         if is_any or len(f.value) == 0:

@@ -827,7 +828,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
         full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
         if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
             event_where.append(
-                sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
+                sh.multi_conditions(f"main.name {op} %({e_k_f})s", f.value,
                                     value_key=e_k_f))
         elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
             event_where.append(

@@ -908,7 +909,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
     # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")

 if errors_only:
-    extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
+    extra_from += f" INNER JOIN events.errors AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
     extra_constraints.append("ser.source = 'js_exception'")
     extra_constraints.append("ser.project_id = %(project_id)s")
     # if error_status != schemas.ErrorStatus.all:

@@ -984,9 +985,9 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
         c.value = helper.values_for_operator(value=c.value, op=c.operator)
         full_args = {**full_args,
                      **sh.multi_values(c.value, value_key=e_k)}
-        if c.type == events.EventType.LOCATION.ui_type:
+        if c.type == schemas.EventType.LOCATION:
             _extra_or_condition.append(
-                sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
+                sh.multi_conditions(f"ev.path {op} %({e_k})s",
                                     c.value, value_key=e_k))
         else:
             logger.warning(f"unsupported extra_event type:${c.type}")

@@ -1044,18 +1045,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
 def get_session_user(project_id, user_id):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """\
-            SELECT
-                user_id,
-                count(*) as session_count,
-                max(start_ts) as last_seen,
-                min(start_ts) as first_seen
-            FROM
-                "public".sessions
-            WHERE
-                project_id = %(project_id)s
-                AND user_id = %(userId)s
-                AND duration is not null
+            """ \
+            SELECT user_id,
+                   count(*) as session_count,
+                   max(start_ts) as last_seen,
+                   min(start_ts) as first_seen
+            FROM "public".sessions
+            WHERE project_id = %(project_id)s
+              AND user_id = %(userId)s
+              AND duration is not null
             GROUP BY user_id;
             """,
            {"project_id": project_id, "userId": user_id}

@ -1074,11 +1072,10 @@ def count_all():
|
|||
|
||||
def session_exists(project_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify("""SELECT 1
|
||||
FROM public.sessions
|
||||
WHERE session_id=%(session_id)s
|
||||
AND project_id=%(project_id)s
|
||||
LIMIT 1;""",
|
||||
query = cur.mogrify("""SELECT 1
|
||||
FROM public.sessions
|
||||
WHERE session_id = %(session_id)s
|
||||
AND project_id = %(project_id)s LIMIT 1;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
cur.execute(query)
|
||||
row = cur.fetchone()
|
||||
|
|
|
|||
|
|
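A note on the `event_from % "..."` lines changed above: this relies on plain %-string substitution, where event_from is assumed to hold a template with a single %s slot for the main event table, so the change only swaps which table name gets injected. A minimal sketch (the template text here is hypothetical):

    # event_from is a format template; only the injected table name changes
    # between the old and new sides of the hunks above.
    event_from = "%s INNER JOIN public.sessions AS s USING (session_id)"
    event_from = event_from % "events_common.requests AS main "
    # -> "events_common.requests AS main INNER JOIN public.sessions AS s USING (session_id)"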
@@ -1,6 +1,7 @@
 import schemas
-from chalicelib.core import events, metadata, events_mobile, \
-    issues, assist, canvas, user_testing
+from chalicelib.core import metadata, assist, canvas, user_testing
+from chalicelib.core.issues import issues
+from chalicelib.core.events import events, events_mobile
 from . import sessions_mobs, sessions_devtool
 from chalicelib.core.errors.modules import errors_helper
 from chalicelib.utils import pg_client, helper

@@ -128,30 +129,8 @@ def get_events(project_id, session_id):
         data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)

         data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
-        data['issues'] = reduce_issues(data['issues'])
+        data['issues'] = issues.reduce_issues(data['issues'])
+        data['incidents'] = events.get_incidents_by_session_id(session_id=session_id, project_id=project_id)
         return data
     else:
         return None
-
-
-# To reduce the number of issues in the replay;
-# will be removed once we agree on how to show issues
-def reduce_issues(issues_list):
-    if issues_list is None:
-        return None
-    i = 0
-    # remove same-type issues if the time between them is <2s
-    while i < len(issues_list) - 1:
-        for j in range(i + 1, len(issues_list)):
-            if issues_list[i]["type"] == issues_list[j]["type"]:
-                break
-        else:
-            i += 1
-            break
-
-        if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
-            issues_list.pop(j)
-        else:
-            i += 1
-
-    return issues_list
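For reference, the relocated reduce_issues collapses bursts of same-type issues: per its own comment, a same-type issue arriving within 2 seconds of the previous one is dropped from the replay list. A small illustration (hypothetical issue payloads, not from the diff):

    issues = [
        {"type": "click_rage", "timestamp": 1000},
        {"type": "click_rage", "timestamp": 2500},  # same type, 1.5s later: collapsed
        {"type": "dead_click", "timestamp": 2600},  # different type: kept
    ]
    # issues.reduce_issues(issues) -> the second click_rage entry is removed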
@@ -1,10 +1,10 @@
-import ast
 import json
 import logging

 import schemas
-from chalicelib.core import metadata, projects
+from chalicelib.core import metadata
-from chalicelib.utils import helper, ch_client, exp_ch_helper
+from . import sessions_favorite, sessions_search_legacy, sessions_ch as sessions, sessions_legacy_mobil
+from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper

 logger = logging.getLogger(__name__)

@@ -57,11 +57,14 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
 """


+def __parse_metadata(metadata_map):
+    return json.loads(metadata_map.replace("'", '"').replace("NULL", 'null'))


 # This function executes the query and return result
 def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
-                    user_id, errors_only=False,
-                    error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
-                    platform="web"):
+                    user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
+                    count_only=False, issue=None, ids_only=False, metric_of: schemas.MetricOfTable = None):
     if data.bookmarked:
         data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
     if data.startTimestamp is None:

@@ -69,20 +72,80 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
         return {
             'total': 0,
             'sessions': [],
-            'src': 2
+            '_src': 2
         }
+    # ---------------------- extra filter in order to only select sessions that has been used in the card-table
+    extra_event = None
+    # extra_deduplication = []
+    extra_conditions = None
+    if metric_of == schemas.MetricOfTable.VISITED_URL:
+        extra_event = f"""SELECT DISTINCT ev.session_id,
+                                 JSONExtractString(toString(ev.`$properties`), 'url_path') AS url_path
+                          FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
+                          WHERE ev.created_at >= toDateTime(%(startDate)s / 1000)
+                            AND ev.created_at <= toDateTime(%(endDate)s / 1000)
+                            AND ev.project_id = %(project_id)s
+                            AND ev.`$event_name` = 'LOCATION'"""
+        # extra_deduplication.append("url_path")
+        extra_conditions = {}
+        for e in data.events:
+            if e.type == schemas.EventType.LOCATION:
+                if e.operator not in extra_conditions:
+                    extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
+                        "type": e.type,
+                        "isEvent": True,
+                        "value": [],
+                        "operator": e.operator,
+                        "filters": e.filters
+                    })
+                for v in e.value:
+                    if v not in extra_conditions[e.operator].value:
+                        extra_conditions[e.operator].value.append(v)
+        extra_conditions = list(extra_conditions.values())
+    elif metric_of == schemas.MetricOfTable.FETCH:
+        extra_event = f"""SELECT DISTINCT ev.session_id
+                          FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
+                          WHERE ev.created_at >= toDateTime(%(startDate)s / 1000)
+                            AND ev.created_at <= toDateTime(%(endDate)s / 1000)
+                            AND ev.project_id = %(project_id)s
+                            AND ev.`$event_name` = 'REQUEST'"""
+
+        # extra_deduplication.append("url_path")
+        extra_conditions = {}
+        for e in data.events:
+            if e.type == schemas.EventType.REQUEST_DETAILS:
+                if e.operator not in extra_conditions:
+                    extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
+                        "type": e.type,
+                        "isEvent": True,
+                        "value": [],
+                        "operator": e.operator,
+                        "filters": e.filters
+                    })
+                for v in e.value:
+                    if v not in extra_conditions[e.operator].value:
+                        extra_conditions[e.operator].value.append(v)
+        extra_conditions = list(extra_conditions.values())
+
+    # elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
+    #     data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
+    #                                                           operator=schemas.SearchEventOperator.IS))
+    # ----------------------
     if project.platform == "web":
         full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=error_status,
                                                                errors_only=errors_only,
                                                                favorite_only=data.bookmarked, issue=issue,
                                                                project_id=project.project_id,
-                                                               user_id=user_id, platform=platform)
+                                                               user_id=user_id, platform=project.platform,
+                                                               extra_event=extra_event,
+                                                               # extra_deduplication=extra_deduplication,
+                                                               extra_conditions=extra_conditions)
     else:
         full_args, query_part = sessions_legacy_mobil.search_query_parts_ch(data=data, error_status=error_status,
                                                                             errors_only=errors_only,
                                                                             favorite_only=data.bookmarked, issue=issue,
                                                                             project_id=project.project_id,
-                                                                            user_id=user_id, platform=platform)
+                                                                            user_id=user_id, platform=project.platform)
     if data.sort == "startTs":
         data.sort = "datetime"
     if data.limit is not None and data.page is not None:

@@ -123,7 +186,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.

             meta_keys = metadata.get(project_id=project.project_id)
             meta_map = ",map(%s) AS 'metadata'" \
-                       % ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])
+                       % ','.join(
+                [f"'{m['key']}',coalesce(metadata_{m['index']},CAST(NULL AS Nullable(String)))" for m in meta_keys])
             main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
                                                 COALESCE(JSONB_AGG(users_sessions)
                                                          FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions

@@ -141,7 +205,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                             ) AS users_sessions;""",
                                      full_args)
         elif ids_only:
-            main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
+            main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
                                               {query_part}
                                               ORDER BY s.session_id desc
                                               LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",

@@ -158,7 +222,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.

             meta_keys = metadata.get(project_id=project.project_id)
             meta_map = ",'metadata',toString(map(%s))" \
-                       % ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])
+                       % ','.join(
+                [f"'{m['key']}',coalesce(metadata_{m['index']},CAST(NULL AS Nullable(String)))" for m in meta_keys])
             main_query = cur.format(query=f"""SELECT any(total) AS count,
                                                      groupArray(%(sessions_limit)s)(details) AS sessions
                                               FROM (SELECT total, details

@@ -175,11 +240,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                                     ORDER BY sort_key {data.order}
                                                     LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                                     parameters=full_args)
-        logging.debug("--------------------")
-        logging.debug(main_query)
-        logging.debug("--------------------")

         try:
+            logging.debug("--------------------")
             sessions_list = cur.execute(main_query)
+            logging.debug("--------------------")
         except Exception as err:
             logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
             logging.warning(main_query)

@@ -200,83 +265,24 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
         for i, s in enumerate(sessions_list):
             sessions_list[i] = {**s.pop("last_session")[0], **s}
             sessions_list[i].pop("rn")
-            sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
+            sessions_list[i]["metadata"] = __parse_metadata(sessions_list[i]["metadata"])
     else:
-        import json
         for i in range(len(sessions_list)):
-            sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
-            sessions_list[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions_list[i]))
+            sessions_list[i]["metadata"] = __parse_metadata(sessions_list[i]["metadata"])
+            sessions_list[i] = schemas.SessionModel.model_validate(helper.dict_to_camel_case(sessions_list[i]))

     return {
         'total': total,
        'sessions': sessions_list,
-        'src': 2
+        '_src': 2
     }


 def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
-    if project_id is None:
-        all_projects = projects.get_projects(tenant_id=tenant_id)
-    else:
-        all_projects = [
-            projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,
-                                 include_gdpr=False)]
-
-    all_projects = {int(p["projectId"]): p["name"] for p in all_projects}
-    project_ids = list(all_projects.keys())
-
-    available_keys = metadata.get_keys_by_projects(project_ids)
-    for i in available_keys:
-        available_keys[i]["user_id"] = schemas.FilterType.USER_ID
-        available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID
-    results = {}
-    for i in project_ids:
-        if m_key not in available_keys[i].values():
-            available_keys.pop(i)
-            results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
-    project_ids = list(available_keys.keys())
-    if len(project_ids) > 0:
-        with pg_client.PostgresClient() as cur:
-            sub_queries = []
-            for i in project_ids:
-                col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
-                sub_queries.append(cur.mogrify(
-                    f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
-                    {"id": i, "value": m_value}).decode('UTF-8'))
-            query = f"""SELECT {", ".join(sub_queries)};"""
-            cur.execute(query=query)
-
-            rows = cur.fetchone()
-
-            sub_queries = []
-            for i in rows.keys():
-                results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]}
-                if rows[i] > 0:
-                    col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)]
-                    sub_queries.append(
-                        cur.mogrify(
-                            f"""(
-                                SELECT *
-                                FROM (
-                                    SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS_CH}
-                                    FROM public.sessions AS s LEFT JOIN (SELECT session_id
-                                                                         FROM public.user_favorite_sessions
-                                                                         WHERE user_favorite_sessions.user_id = %(userId)s
-                                                                        ) AS favorite_sessions USING (session_id)
-                                    WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
-                                ) AS full_sessions
-                                ORDER BY favorite DESC, issue_score DESC
-                                LIMIT 10
-                            )""",
-                            {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
-            if len(sub_queries) > 0:
-                cur.execute("\nUNION\n".join(sub_queries))
-                rows = cur.fetchall()
-                for i in rows:
-                    results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
-    return results
+    return sessions_search_legacy.search_by_metadata(tenant_id, user_id, m_key, m_value, project_id)


+# TODO: rewrite this function to use ClickHouse
+def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
+                           ascending: bool = False) -> dict:
+    return sessions_search_legacy.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)
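The two metric_of branches above share one merge step: LOCATION (or REQUEST_DETAILS) events are grouped per operator so the card-table subquery receives a single deduplicated condition per operator. A standalone sketch of that merge, using plain dicts in place of schemas.SessionSearchEventSchema (field names follow the diff; the payloads are illustrative):

    events = [
        {"type": "location", "operator": "is", "value": ["/home"], "filters": []},
        {"type": "location", "operator": "is", "value": ["/cart"], "filters": []},
        {"type": "location", "operator": "contains", "value": ["check"], "filters": []},
    ]
    merged = {}
    for e in events:
        if e["type"] != "location":
            continue
        merged.setdefault(e["operator"], {**e, "value": []})
        for v in e["value"]:
            if v not in merged[e["operator"]]["value"]:
                merged[e["operator"]]["value"].append(v)
    extra_conditions = list(merged.values())
    # -> one "is" condition with value ["/home", "/cart"], one "contains" with ["check"]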
@@ -40,7 +40,8 @@ COALESCE((SELECT TRUE
 # This function executes the query and return result
 def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
                     user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
-                    count_only=False, issue=None, ids_only=False, platform="web"):
+                    count_only=False, issue=None, ids_only=False, metric_of: schemas.MetricOfTable = None):
+    platform = project.platform
     if data.bookmarked:
         data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
     if data.startTimestamp is None:

@@ -48,7 +49,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
         return {
             'total': 0,
             'sessions': [],
-            'src': 1
+            '_src': 1
         }
     full_args, query_part = sessions_legacy.search_query_parts(data=data, error_status=error_status,
                                                                errors_only=errors_only,

@@ -122,7 +123,10 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
         sort = 'session_id'
         if data.sort is not None and data.sort != "session_id":
             # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
-            sort = helper.key_to_snake_case(data.sort)
+            if data.sort == 'datetime':
+                sort = 'start_ts'
+            else:
+                sort = helper.key_to_snake_case(data.sort)

         meta_keys = metadata.get(project_id=project.project_id)
         main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,

@@ -173,7 +177,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
     return {
         'total': total,
         'sessions': helper.list_to_camel_case(sessions),
-        'src': 1
+        '_src': 1
     }


@@ -236,6 +240,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
                 cur.execute("\nUNION\n".join(sub_queries))
                 rows = cur.fetchall()
                 for i in rows:
+                    i["_src"] = 1
                     results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
     return results


@@ -243,7 +248,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
 def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
                            ascending: bool = False) -> dict:
     if session_ids is None or len(session_ids) == 0:
-        return {"total": 0, "sessions": []}
+        return {"total": 0, "sessions": [], "_src": 1}
     with pg_client.PostgresClient() as cur:
         meta_keys = metadata.get(project_id=project_id)
         params = {"project_id": project_id, "session_ids": tuple(session_ids)}

@@ -262,4 +267,4 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
             s["metadata"] = {}
             for m in meta_keys:
                 s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
-    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
+    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "_src": 1}
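The "_src" field threaded through both search modules tags every payload with the backend that produced it: 1 for the PostgreSQL legacy path here, 2 for the ClickHouse path in the previous file. A consumer comparing the two implementations can branch on it, e.g.:

    resp = search_sessions(data, project, user_id)
    backend = {1: "postgres-legacy", 2: "clickhouse"}.get(resp.get("_src"), "unknown")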
@@ -1 +1,2 @@
 from .sessions_viewed import *
+from .sessions_viewed_ch import *
@@ -87,7 +87,7 @@ async def create_tenant(data: schemas.UserSignupSchema):
         "spotRefreshToken": r.pop("spotRefreshToken"),
         "spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"),
         'data': {
-            "scopeState": 0,
+            "scopeState": 2,
             "user": r
         }
     }
@@ -11,9 +11,3 @@ if smtp.has_smtp():
     logger.info("valid SMTP configuration found")
 else:
     logger.info("no SMTP configuration found or SMTP validation failed")
-
-if config("EXP_CH_DRIVER", cast=bool, default=True):
-    logging.info(">>> Using new CH driver")
-    from . import ch_client_exp as ch_client
-else:
-    from . import ch_client
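The removed block was the import-time switch between the two ClickHouse clients; it appears to be obsolete because the client module itself is rewritten on top of clickhouse-connect in the next file. For reference, config(..., cast=bool) from python-decouple is what parsed the flag from the environment:

    from decouple import config
    # with EXP_CH_DRIVER=false in the environment:
    config("EXP_CH_DRIVER", cast=bool, default=True)  # -> False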
@@ -1,73 +1,185 @@
 import logging
+import threading
+import time
+from functools import wraps
+from queue import Queue, Empty

-import clickhouse_driver
+import clickhouse_connect
+from clickhouse_connect.driver.query import QueryContext
 from decouple import config

 logger = logging.getLogger(__name__)

+_CH_CONFIG = {"host": config("ch_host"),
+              "user": config("ch_user", default="default"),
+              "password": config("ch_password", default=""),
+              "port": config("ch_port_http", cast=int),
+              "client_name": config("APP_NAME", default="PY")}
+CH_CONFIG = dict(_CH_CONFIG)

 settings = {}
 if config('ch_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
+    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
     settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

 if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
+    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
     settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

+extra_args = {}
+if config("CH_COMPRESSION", cast=bool, default=True):
+    extra_args["compression"] = "lz4"
+
+
+def transform_result(self, original_function):
+    @wraps(original_function)
+    def wrapper(*args, **kwargs):
+        if kwargs.get("parameters"):
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
+            else:
+                logger.debug(
+                    str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
+        elif len(args) > 0:
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(args[0])
+            else:
+                logger.debug(str.encode(args[0]))
+        result = original_function(*args, **kwargs)
+        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
+            column_names = result.column_names
+            result = result.result_rows
+            result = [dict(zip(column_names, row)) for row in result]
+
+        return result
+
+    return wrapper
+
+
+class ClickHouseConnectionPool:
+    def __init__(self, min_size, max_size):
+        self.min_size = min_size
+        self.max_size = max_size
+        self.pool = Queue()
+        self.lock = threading.Lock()
+        self.total_connections = 0
+
+        # Initialize the pool with min_size connections
+        for _ in range(self.min_size):
+            client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                   database=config("ch_database", default="default"),
+                                                   settings=settings,
+                                                   **extra_args)
+            self.pool.put(client)
+            self.total_connections += 1
+
+    def get_connection(self):
+        try:
+            # Try to get a connection without blocking
+            client = self.pool.get_nowait()
+            return client
+        except Empty:
+            with self.lock:
+                if self.total_connections < self.max_size:
+                    client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                           database=config("ch_database", default="default"),
+                                                           settings=settings,
+                                                           **extra_args)
+                    self.total_connections += 1
+                    return client
+            # If max_size reached, wait until a connection is available
+            client = self.pool.get()
+            return client
+
+    def release_connection(self, client):
+        self.pool.put(client)
+
+    def close_all(self):
+        with self.lock:
+            while not self.pool.empty():
+                client = self.pool.get()
+                client.close()
+            self.total_connections = 0
+
+
+CH_pool: ClickHouseConnectionPool = None
+
+RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
+RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
+RETRY = 0
+
+
+def make_pool():
+    if not config('CH_POOL', cast=bool, default=True):
+        return
+    global CH_pool
+    global RETRY
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
+    try:
+        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
+                                           max_size=config("CH_MAXCONN", cast=int, default=8))
+        if CH_pool is not None:
+            logger.info("Connection pool created successfully for CH")
+    except ConnectionError as error:
+        logger.error("Error while connecting to CH", exc_info=error)
+        if RETRY < RETRY_MAX:
+            RETRY += 1
+            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+            time.sleep(RETRY_INTERVAL)
+            make_pool()
+        else:
+            raise error
+
+
 class ClickHouseClient:
     __client = None

     def __init__(self, database=None):
-        extra_args = {}
-        if config("CH_COMPRESSION", cast=bool, default=True):
-            extra_args["compression"] = "lz4"
-        self.__client = clickhouse_driver.Client(host=config("ch_host"),
-                                                 database=database if database else config("ch_database",
-                                                                                           default="default"),
-                                                 user=config("ch_user", default="default"),
-                                                 password=config("ch_password", default=""),
-                                                 port=config("ch_port", cast=int),
-                                                 settings=settings,
-                                                 **extra_args) \
-            if self.__client is None else self.__client
+        if self.__client is None:
+            if database is not None or not config('CH_POOL', cast=bool, default=True):
+                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                              database=database if database else config("ch_database",
+                                                                                                        default="default"),
+                                                              settings=settings,
+                                                              **extra_args)
+
+            else:
+                self.__client = CH_pool.get_connection()
+
+        self.__client.execute = transform_result(self, self.__client.query)
+        self.__client.format = self.format

     def __enter__(self):
         return self

-    def execute(self, query, parameters=None, **args):
-        try:
-            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
-            keys = tuple(x for x, y in results[1])
-            return [dict(zip(keys, i)) for i in results[0]]
-        except Exception as err:
-            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
-            logger.error("--------- CH QUERY EXCEPTION -----------")
-            logger.error(self.format(query=query, parameters=parameters)
-                         .replace('\n', '\\n')
-                         .replace('    ', ' ')
-                         .replace('  ', ' '))
-            logger.error("--------------------")
-            raise err
-
     def insert(self, query, params=None, **args):
         return self.__client.execute(query=query, params=params, **args)

     def client(self):
         return self.__client

-    def format(self, query, parameters):
-        if parameters is None:
-            return query
-        return self.__client.substitute_params(query, parameters, self.__client.connection.context)
+    def format(self, query, parameters=None):
+        if parameters:
+            ctx = QueryContext(query=query, parameters=parameters)
+            return ctx.final_query
+        return query

     def __exit__(self, *args):
-        pass
+        if config('CH_POOL', cast=bool, default=True):
+            CH_pool.release_connection(self.__client)
+        else:
+            self.__client.close()


 async def init():
-    logger.info(f">CH_POOL:not defined")
+    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
+    if config('CH_POOL', cast=bool, default=True):
+        make_pool()


 async def terminate():
-    pass
+    global CH_pool
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+            logger.info("Closed all connexions to CH")
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
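Intended usage of the rewritten client, as seen at its call sites earlier in this compare (a sketch; it assumes ch_host, ch_port_http and the other ch_* settings are present in the environment and that init() ran at startup):

    with ClickHouseClient() as cur:
        rows = cur.execute(query="SELECT 1 AS one", parameters={})
        # transform_result unwraps clickhouse_connect's QueryResult into a
        # list of dicts: [{"one": 1}]
    # __exit__ hands the connection back to CH_pool (or closes it when CH_POOL=false)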
@@ -1,177 +0,0 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty

import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config

logger = logging.getLogger(__name__)

_CH_CONFIG = {"host": config("ch_host"),
              "user": config("ch_user", default="default"),
              "password": config("ch_password", default=""),
              "port": config("ch_port_http", cast=int),
              "client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)

settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

if config('ch_receive_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
    extra_args["compression"] = "lz4"


def transform_result(self, original_function):
    @wraps(original_function)
    def wrapper(*args, **kwargs):
        logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
        result = original_function(*args, **kwargs)
        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
            column_names = result.column_names
            result = result.result_rows
            result = [dict(zip(column_names, row)) for row in result]

        return result

    return wrapper


class ClickHouseConnectionPool:
    def __init__(self, min_size, max_size):
        self.min_size = min_size
        self.max_size = max_size
        self.pool = Queue()
        self.lock = threading.Lock()
        self.total_connections = 0

        # Initialize the pool with min_size connections
        for _ in range(self.min_size):
            client = clickhouse_connect.get_client(**CH_CONFIG,
                                                   database=config("ch_database", default="default"),
                                                   settings=settings,
                                                   **extra_args)
            self.pool.put(client)
            self.total_connections += 1

    def get_connection(self):
        try:
            # Try to get a connection without blocking
            client = self.pool.get_nowait()
            return client
        except Empty:
            with self.lock:
                if self.total_connections < self.max_size:
                    client = clickhouse_connect.get_client(**CH_CONFIG,
                                                           database=config("ch_database", default="default"),
                                                           settings=settings,
                                                           **extra_args)
                    self.total_connections += 1
                    return client
            # If max_size reached, wait until a connection is available
            client = self.pool.get()
            return client

    def release_connection(self, client):
        self.pool.put(client)

    def close_all(self):
        with self.lock:
            while not self.pool.empty():
                client = self.pool.get()
                client.close()
            self.total_connections = 0


CH_pool: ClickHouseConnectionPool = None

RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0


def make_pool():
    if not config('CH_POOL', cast=bool, default=True):
        return
    global CH_pool
    global RETRY
    if CH_pool is not None:
        try:
            CH_pool.close_all()
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
    try:
        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                           max_size=config("CH_MAXCONN", cast=int, default=8))
        if CH_pool is not None:
            logger.info("Connection pool created successfully for CH")
    except ConnectionError as error:
        logger.error("Error while connecting to CH", exc_info=error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


class ClickHouseClient:
    __client = None

    def __init__(self, database=None):
        if self.__client is None:
            if database is not None or not config('CH_POOL', cast=bool, default=True):
                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
                                                              database=database if database else config("ch_database",
                                                                                                        default="default"),
                                                              settings=settings,
                                                              **extra_args)

            else:
                self.__client = CH_pool.get_connection()

        self.__client.execute = transform_result(self, self.__client.query)
        self.__client.format = self.format

    def __enter__(self):
        return self.__client

    def format(self, query, *, parameters=None):
        if parameters is None:
            return query
        return query % {
            key: f"'{value}'" if isinstance(value, str) else value
            for key, value in parameters.items()
        }

    def __exit__(self, *args):
        if config('CH_POOL', cast=bool, default=True):
            CH_pool.release_connection(self.__client)
        else:
            self.__client.close()


async def init():
    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
    if config('CH_POOL', cast=bool, default=True):
        make_pool()


async def terminate():
    global CH_pool
    if CH_pool is not None:
        try:
            CH_pool.close_all()
            logger.info("Closed all connexions to CH")
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
@@ -1,7 +1,14 @@
-from typing import Union
+import logging
+import math
+import re
+import struct
+from decimal import Decimal
+from typing import Union, Any

 import schemas
-import logging
 from chalicelib.utils import sql_helper as sh
+from chalicelib.utils.TimeUTC import TimeUTC
+from schemas import SearchEventOperator

 logger = logging.getLogger(__name__)


@@ -50,7 +57,8 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
         schemas.EventType.ERROR: "ERROR",
         schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE',
         schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE',
-        schemas.FetchFilterType.FETCH_URL: 'REQUEST'
+        schemas.FetchFilterType.FETCH_URL: 'REQUEST',
+        schemas.EventType.INCIDENT: "INCIDENT",
     }
     defs_mobile = {
         schemas.EventType.CLICK_MOBILE: "TAP",

@@ -59,10 +67,183 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
         schemas.EventType.REQUEST_MOBILE: "REQUEST",
         schemas.EventType.ERROR_MOBILE: "CRASH",
         schemas.EventType.VIEW_MOBILE: "VIEW",
-        schemas.EventType.SWIPE_MOBILE: "SWIPE"
+        schemas.EventType.SWIPE_MOBILE: "SWIPE",
+        schemas.EventType.INCIDENT: "INCIDENT"
     }
     if platform != "web" and event_type in defs_mobile:
         return defs_mobile.get(event_type)
     if event_type not in defs:
         raise Exception(f"unsupported EventType:{event_type}")
     return defs.get(event_type)
+
+
+# AI generated
+def simplify_clickhouse_type(ch_type: str) -> str:
+    """
+    Simplify a ClickHouse data type name to a broader category like:
+    int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
+    """
+
+    # 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
+    #    Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
+    pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
+    while True:
+        match = pattern_wrappers.match(ch_type)
+        if match:
+            ch_type = match.group(2)
+        else:
+            break
+
+    # 2) Normalize (lowercase) for easier checks
+    normalized_type = ch_type.lower()
+
+    # 3) Use pattern matching or direct checks for known categories
+    #    (You can adapt this as you see fit for your environment.)
+
+    # Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
+    if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
+        return "int"
+
+    # Floats: Float32, Float64
+    if re.match(r'^float(32|64)|double$', normalized_type):
+        return "float"
+
+    # Decimal: Decimal(P, S)
+    if normalized_type.startswith("decimal"):
+        # return "decimal"
+        return "float"
+
+    # Date/DateTime
+    if normalized_type.startswith("date"):
+        return "datetime"
+    if normalized_type.startswith("datetime"):
+        return "datetime"
+
+    # Strings: String, FixedString(N)
+    if normalized_type.startswith("string"):
+        return "string"
+    if normalized_type.startswith("fixedstring"):
+        return "string"
+
+    # UUID
+    if normalized_type.startswith("uuid"):
+        # return "uuid"
+        return "string"
+
+    # Enums: Enum8(...) or Enum16(...)
+    if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
+        # return "enum"
+        return "string"
+
+    # Arrays: Array(T)
+    if normalized_type.startswith("array"):
+        return "array"
+
+    # Tuples: Tuple(T1, T2, ...)
+    if normalized_type.startswith("tuple"):
+        return "tuple"
+
+    # Map(K, V)
+    if normalized_type.startswith("map"):
+        return "map"
+
+    # Nested(...)
+    if normalized_type.startswith("nested"):
+        return "nested"
+
+    # If we didn't match above, just return the original type in lowercase
+    return normalized_type
+
+
+def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
+    """
+    Takes a list of ClickHouse types and returns a list of simplified types
+    by calling `simplify_clickhouse_type` on each.
+    """
+    return list(set([simplify_clickhouse_type(t) for t in ch_types]))
+
+
+def get_sub_condition(col_name: str, val_name: str,
+                      operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str:
+    if operator == SearchEventOperator.PATTERN:
+        return f"match({col_name}, %({val_name})s)"
+    op = sh.get_sql_operator(operator)
+    return f"{col_name} {op} %({val_name})s"
+
+
+def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str:
+    if value is None or len(value) == 0:
+        return ""
+    if isinstance(value, list):
+        value = value[0]
+    if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT):
+        return best_clickhouse_type(value)
+    return data_type.capitalize()
+
+
+# (type_name, minimum, maximum) – ordered by increasing size
+_INT_RANGES = [
+    ("Int8", -128, 127),
+    ("UInt8", 0, 255),
+    ("Int16", -32_768, 32_767),
+    ("UInt16", 0, 65_535),
+    ("Int32", -2_147_483_648, 2_147_483_647),
+    ("UInt32", 0, 4_294_967_295),
+    ("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807),
+    ("UInt64", 0, 18_446_744_073_709_551_615),
+]
+
+
+def best_clickhouse_type(value):
+    """
+    Return the most compact ClickHouse numeric type that can store *value* loss-lessly.
+    """
+    # Treat bool like tiny int
+    if isinstance(value, bool):
+        value = int(value)
+
+    # --- Integers ---
+    if isinstance(value, int):
+        for name, lo, hi in _INT_RANGES:
+            if lo <= value <= hi:
+                return name
+        # Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal
+        return "Int128"
+
+    # --- Decimal.Decimal (exact) ---
+    if isinstance(value, Decimal):
+        # ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits.
+        digits = len(value.as_tuple().digits)
+        if digits <= 9:
+            return "Decimal32"
+        elif digits <= 18:
+            return "Decimal64"
+        else:
+            return "Decimal128"
+
+    # --- Floats ---
+    if isinstance(value, float):
+        if not math.isfinite(value):
+            return "Float64"  # inf / nan → always Float64
+
+        # Check if a round-trip through 32-bit float preserves the bit pattern
+        packed = struct.pack("f", value)
+        if struct.unpack("f", packed)[0] == value:
+            return "Float32"
+        return "Float64"
+
+    raise TypeError(f"Unsupported type: {type(value).__name__}")
+
+
+def explode_dproperties(rows):
+    for i in range(len(rows)):
+        rows[i] = {**rows[i], **rows[i]["$properties"]}
+        rows[i].pop("$properties")
+    return rows
+
+
+def add_timestamp(rows):
+    for row in rows:
+        row["timestamp"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return rows
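Expected behavior of the new helpers, based on the code above (illustrative calls):

    simplify_clickhouse_type("LowCardinality(Nullable(Int32))")  # "int" (wrappers stripped first)
    simplify_clickhouse_type("Decimal(18, 4)")                   # "float" (decimals are folded into float)
    best_clickhouse_type(300)    # "Int16"  (smallest range in _INT_RANGES that fits)
    best_clickhouse_type(1.5)    # "Float32" (1.5 survives a 32-bit round-trip)
    best_clickhouse_type(True)   # "Int8"   (bool is coerced to int first)

Two quirks worth noting in simplify_clickhouse_type as committed: the float regex r'^float(32|64)|double$' binds as (^float(32|64)) OR (double$) rather than anchoring both alternatives, and the startswith("date") branch already catches "datetime", so the dedicated datetime check is unreachable (harmless, since both return "datetime").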
@@ -15,11 +15,11 @@ def random_string(length=36):
     return "".join(random.choices(string.hexdigits, k=length))


-def list_to_camel_case(items: list[dict], flatten: bool = False) -> list[dict]:
+def list_to_camel_case(items: list[dict], flatten: bool = False, ignore_keys=[]) -> list[dict]:
     for i in range(len(items)):
         if flatten:
             items[i] = flatten_nested_dicts(items[i])
-        items[i] = dict_to_camel_case(items[i])
+        items[i] = dict_to_camel_case(items[i], ignore_keys=[])

     return items
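Note on the new signature: as written, the call site passes a literal empty list, so the ignore_keys argument is accepted but never forwarded. The presumably intended line is:

    items[i] = dict_to_camel_case(items[i], ignore_keys=ignore_keys)

(A tuple default, ignore_keys=(), would also sidestep Python's mutable-default-argument pitfall.)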
@@ -99,6 +99,8 @@ def allow_captcha():


 def string_to_sql_like(value):
+    if value is None:
+        return None
     value = re.sub(' +', ' ', value)
     value = value.replace("*", "%")
     if value.startswith("^"):

@@ -334,5 +336,3 @@ def cast_session_id_to_string(data):
         for key in keys:
             data[key] = cast_session_id_to_string(data[key])
     return data
-
-
@@ -1 +0,0 @@
from .or_cache import CachedResponse

@@ -1,83 +0,0 @@
import functools
import inspect
import json
import logging
from chalicelib.utils import pg_client
import time
from fastapi.encoders import jsonable_encoder

logger = logging.getLogger(__name__)


class CachedResponse:
    def __init__(self, table, ttl):
        self.table = table
        self.ttl = ttl

    def __call__(self, func):
        self.param_names = {i: param for i, param in enumerate(inspect.signature(func).parameters)}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            values = dict()
            for i, param in self.param_names.items():
                if i < len(args):
                    values[param] = args[i]
                elif param in kwargs:
                    values[param] = kwargs[param]
                else:
                    values[param] = None
            result = self.__get(values)
            if result is None or result["expired"] \
                    or result["result"] is None or len(result["result"]) == 0:
                now = time.time()
                result = func(*args, **kwargs)
                now = time.time() - now
                if result is not None and len(result) > 0:
                    self.__add(values, result, now)
                    result[0]["cached"] = False
            else:
                logger.info(f"using cached response for "
                            f"{func.__name__}({','.join([f'{key}={val}' for key, val in enumerate(values)])})")
                result = result["result"]
                result[0]["cached"] = True

            return result

        return wrapper

    def __get(self, values):
        with pg_client.PostgresClient() as cur:
            sub_constraints = []
            for key, value in values.items():
                if value is not None:
                    sub_constraints.append(f"{key}=%({key})s")
                else:
                    sub_constraints.append(f"{key} IS NULL")
            query = f"""SELECT result,
                               (%(ttl)s>0
                                AND EXTRACT(EPOCH FROM (timezone('utc'::text, now()) - created_at - INTERVAL %(interval)s)) > 0) AS expired
                        FROM {self.table}
                        WHERE {" AND ".join(sub_constraints)}"""
            query = cur.mogrify(query, {**values, 'ttl': self.ttl, 'interval': f'{self.ttl} seconds'})
            logger.debug("------")
            logger.debug(query)
            logger.debug("------")
            cur.execute(query)
            result = cur.fetchone()
        return result

    def __add(self, values, result, execution_time):
        with pg_client.PostgresClient() as cur:
            query = f"""INSERT INTO {self.table} ({",".join(values.keys())},result,execution_time)
                        VALUES ({",".join([f"%({param})s" for param in values.keys()])},%(result)s,%(execution_time)s)
                        ON CONFLICT ({",".join(values.keys())}) DO UPDATE SET result=%(result)s,
                                                                              execution_time=%(execution_time)s,
                                                                              created_at=timezone('utc'::text, now());"""
            query = cur.mogrify(query, {**values,
                                        "result": json.dumps(jsonable_encoder(result)),
                                        "execution_time": execution_time})
            logger.debug("------")
            logger.debug(query)
            logger.debug("------")
            cur.execute(query)
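For context, this is how the removed decorator was meant to be applied: it keyed a PostgreSQL cache table on the wrapped function's arguments and injected a "cached" flag into the first result row. A sketch (the decorator arguments here are hypothetical):

    @CachedResponse(table="public.cached_reports", ttl=300)
    def heavy_report(project_id, start_ts=None):
        ...  # expensive query; served from the cache table until ttl expires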
@@ -14,6 +14,9 @@ def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventEx
         schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
         schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
         schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
+        # this is not used as an operator, it is used in order to maintain a valid value for conditions
+        schemas.SearchEventOperator.PATTERN: "regex",
+
         # Selector operators:
         schemas.ClickEventExtraOperator.IS: "=",
         schemas.ClickEventExtraOperator.IS_NOT: "!=",

@@ -41,7 +44,7 @@ def reverse_sql_operator(op):
     return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"


-def multi_conditions(condition, values, value_key="value", is_not=False):
+def multi_conditions(condition, values, value_key="value", is_not=False) -> str:
     query = []
     for i in range(len(values)):
         k = f"{value_key}_{i}"

@@ -49,12 +52,16 @@ def multi_conditions(condition, values, value_key="value", is_not=False):
     return "(" + (" AND " if is_not else " OR ").join(query) + ")"


-def multi_values(values, value_key="value"):
+def multi_values(values, value_key="value", data_type: schemas.PropertyType | None = None):
     query_values = {}
     if values is not None and isinstance(values, list):
         for i in range(len(values)):
             k = f"{value_key}_{i}"
             query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i]
+            if data_type:
+                if data_type == schemas.PropertyType.STRING:
+                    query_values[k] = str(query_values[k])

     return query_values


@@ -73,3 +80,29 @@ def single_value(values):
             values[i] = v.value
     return values
+
+
+def coordinate_conditions(condition_x, condition_y, values, value_key="value", is_not=False):
+    query = []
+    if len(values) == 2:
+        # if 2 values are provided, it means x=v[0] and y=v[1]
+        for i in range(len(values)):
+            k = f"{value_key}_{i}"
+            if i == 0:
+                query.append(f"{condition_x}=%({k})s")
+            elif i == 1:
+                query.append(f"{condition_y}=%({k})s")
+
+    elif len(values) == 4:
+        # if 4 values are provided, it means v[0]<=x<=v[1] and v[2]<=y<=v[3]
+        for i in range(len(values)):
+            k = f"{value_key}_{i}"
+            if i == 0:
+                query.append(f"{condition_x}>=%({k})s")
+            elif i == 1:
+                query.append(f"{condition_x}<=%({k})s")
+            elif i == 2:
+                query.append(f"{condition_y}>=%({k})s")
+            elif i == 3:
+                query.append(f"{condition_y}<=%({k})s")
+
+    return "(" + (" AND " if is_not else " OR ").join(query) + ")"
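Given the loop shown above (the middle of multi_conditions is not in this hunk, so the substitution step — each occurrence of value_key in the condition rewritten to value_key_i — is inferred), the two helpers pair up like this for a 2-value filter:

    multi_conditions("main.path ILIKE %(url_value)s", ["/a", "/b"], value_key="url_value")
    # -> "(main.path ILIKE %(url_value_0)s OR main.path ILIKE %(url_value_1)s)"
    multi_values(["/a", "/b"], value_key="url_value")
    # -> {"url_value_0": "/a", "url_value_1": "/b"}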
@@ -74,4 +74,6 @@ EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
 EXP_ALERTS=true
 EXP_ERRORS_SEARCH=true
 EXP_METRICS=true
+EXP_SESSIONS_SEARCH=true
+EXP_EVENTS=true

@@ -68,4 +68,5 @@ EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
 EXP_ALERTS=true
 EXP_ERRORS_SEARCH=true
 EXP_METRICS=true
+EXP_EVENTS=true
@ -1,591 +0,0 @@
|
|||
-- -- Original Q3
|
||||
-- WITH ranked_events AS (SELECT *
|
||||
-- FROM ranked_events_1736344377403),
|
||||
-- n1 AS (SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM ranked_events
|
||||
-- WHERE event_number_in_session = 1
|
||||
-- AND isNotNull(next_value)
|
||||
-- GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n2 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n1
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 2
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n3 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n2
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 3
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n4 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n3
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 4
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n5 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n4
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 5
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8)
|
||||
-- SELECT *
|
||||
-- FROM (SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n1
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n2
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n3
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n4
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n5) AS chart_steps
|
||||
-- ORDER BY event_number_in_session;
|
||||
|
||||
-- Q1
|
||||
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
|
||||
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
|
||||
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
|
||||
FROM experimental.events AS events
|
||||
WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
|
||||
AND events.project_id = toUInt16(65)
|
||||
AND events.datetime >= toDateTime(1735599600000 / 1000)
|
||||
AND events.datetime < toDateTime(1736290799999 / 1000)
|
||||
GROUP BY 1),
|
||||
pre_ranked_events AS (SELECT *
|
||||
FROM (SELECT session_id,
|
||||
event_type,
|
||||
datetime,
|
||||
url_path AS e_value,
|
||||
row_number() OVER (PARTITION BY session_id
|
||||
ORDER BY datetime ,
|
||||
message_id ) AS event_number_in_session
|
||||
FROM experimental.events AS events
|
||||
INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
|
||||
WHERE events.project_id = toUInt16(65)
|
||||
AND events.datetime >= toDateTime(1735599600000 / 1000)
|
||||
AND events.datetime < toDateTime(1736290799999 / 1000)
|
||||
AND (events.event_type = 'LOCATION')
|
||||
AND events.datetime >= initial_event.start_event_timestamp
|
||||
) AS full_ranked_events
|
||||
WHERE event_number_in_session <= 5)
|
||||
SELECT *
|
||||
FROM pre_ranked_events);
|
||||
;
|
||||
|
||||
SELECT *
|
||||
FROM pre_ranked_events_1736344377403
|
||||
WHERE event_number_in_session < 3;
|
||||
|
||||
|
||||
|
||||
-- ---------Q2-----------
|
||||
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
|
||||
DROP TABLE ranked_events_1736344377403;
|
||||
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
|
||||
(WITH pre_ranked_events AS (SELECT *
|
||||
FROM pre_ranked_events_1736344377403),
|
||||
start_points AS (SELECT DISTINCT session_id
|
||||
FROM pre_ranked_events
|
||||
WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
|
||||
AND pre_ranked_events.event_number_in_session = 1),
|
||||
ranked_events AS (SELECT pre_ranked_events.*,
|
||||
leadInFrame(e_value)
|
||||
OVER (PARTITION BY session_id ORDER BY datetime
|
||||
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
|
||||
leadInFrame(toNullable(event_type))
|
||||
OVER (PARTITION BY session_id ORDER BY datetime
|
||||
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
|
||||
FROM start_points
|
||||
INNER JOIN pre_ranked_events USING (session_id))
|
||||
SELECT *
|
||||
FROM ranked_events);
|
||||
|
||||
|
||||
-- ranked events
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 2
|
||||
-- AND e_value='/en/deployment/deploy-docker/'
|
||||
-- AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
|
||||
-- AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
|
||||
AND isNotNull(next_value)
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY event_number_in_session, sessions_count DESC;
|
||||
|
||||
|
||||
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 1
|
||||
GROUP BY event_number_in_session, event_type, e_value
|
||||
ORDER BY event_number_in_session, sessions_count DESC;
|
||||
|
||||
SELECT COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 2
|
||||
AND isNull(next_value)
|
||||
;
|
||||
|
||||
-- ---------Q3 MORE -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     drop_n AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL   AS next_value,
                       sessions_count
                FROM n1
                WHERE isNull(n1.next_type)
                UNION ALL
                -- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL   AS next_value,
                       sessions_count
                FROM n2
                WHERE isNull(n2.next_type)),
     -- TODO: make this as top_steps, where every step will go to next as top/others
     top_n1 AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       next_type,
                       next_value,
                       sessions_count
                FROM n1
                WHERE isNotNull(next_type)
                ORDER BY sessions_count DESC
                LIMIT 3),
     top_n2 AS (-- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       next_type,
                       next_value,
                       sessions_count
                FROM n2
                WHERE (event_type, e_value) IN (SELECT event_type,
                                                       e_value
                                                FROM n2
                                                WHERE isNotNull(next_type)
                                                GROUP BY event_type, e_value
                                                ORDER BY SUM(sessions_count) DESC
                                                LIMIT 3)
                ORDER BY sessions_count DESC),
     top_n AS (SELECT *
               FROM top_n1
               UNION ALL
               SELECT *
               FROM top_n2),
     u_top_n AS (SELECT DISTINCT event_number_in_session,
                                 event_type,
                                 e_value
                 FROM top_n),
     others_n AS (
         -- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n1
         WHERE isNotNull(next_type)
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3
         UNION ALL
         -- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n2
         WHERE isNotNull(next_type)
         -- GROUP BY event_number_in_session, event_type, e_value
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3)
SELECT *
FROM (
     -- Top
     SELECT *
     FROM top_n
     -- UNION ALL
     -- -- Others
     -- SELECT event_number_in_session,
     --        event_type,
     --        e_value,
     --        'OTHER' AS next_type,
     --        NULL    AS next_value,
     --        SUM(sessions_count)
     -- FROM others_n
     -- GROUP BY event_number_in_session, event_type, e_value
     -- UNION ALL
     -- -- Top go to Drop
     -- SELECT drop_n.event_number_in_session,
     --        drop_n.event_type,
     --        drop_n.e_value,
     --        drop_n.next_type,
     --        drop_n.next_value,
     --        drop_n.sessions_count
     -- FROM drop_n
     --          INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
     --     AND drop_n.event_type = u_top_n.event_type
     --     AND drop_n.e_value = u_top_n.e_value)
     -- ORDER BY drop_n.event_number_in_session
     -- -- -- UNION ALL
     -- -- -- Top go to Others
     -- SELECT top_n.event_number_in_session,
     --        top_n.event_type,
     --        top_n.e_value,
     --        'OTHER' AS next_type,
     --        NULL    AS next_value,
     --        SUM(top_n.sessions_count) AS sessions_count
     -- FROM top_n
     --          LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
     --     AND top_n.next_type = others_n.event_type
     --     AND top_n.next_value = others_n.e_value)
     -- WHERE others_n.event_number_in_session IS NULL
     --   AND top_n.next_type IS NOT NULL
     -- GROUP BY event_number_in_session, event_type, e_value
     -- UNION ALL
     -- -- Others go to Top
     -- SELECT others_n.event_number_in_session,
     --        'OTHER' AS event_type,
     --        NULL    AS e_value,
     --        others_n.s_next_type  AS next_type,
     --        others_n.s_next_value AS next_value,
     --        SUM(sessions_count)   AS sessions_count
     -- FROM others_n
     --          INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
     --                               others_n.s_next_type = top_n.event_type AND
     --                               others_n.s_next_value = top_n.event_type)
     -- GROUP BY others_n.event_number_in_session, next_type, next_value
     -- UNION ALL
     -- -- TODO: find if this works or not
     -- -- Others go to Others
     -- SELECT others_n.event_number_in_session,
     --        'OTHER'  AS event_type,
     --        NULL     AS e_value,
     --        'OTHERS' AS next_type,
     --        NULL     AS next_value,
     --        SUM(sessions_count) AS sessions_count
     -- FROM others_n
     --          LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
     --     AND others_n.s_next_type = u_top_n.event_type
     --     AND others_n.s_next_value = u_top_n.e_value)
     -- WHERE u_top_n.event_number_in_session IS NULL
     -- GROUP BY others_n.event_number_in_session
     )
ORDER BY event_number_in_session;

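-- The commented-out UNION branches above sketch the intended decomposition of each
-- step into top / OTHER / DROP buckets. When enabling them, a useful sanity check
-- is that the buckets at a given step add up to the number of sessions that
-- reached that step, which can be computed independently:
-- SELECT event_number_in_session, COUNT(1) AS sessions_count
-- FROM ranked_events_1736344377403
-- GROUP BY event_number_in_session
-- ORDER BY event_number_in_session;
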
-- ---------Q3 TOP ON VALUE ONLY -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),

     drop_n AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL   AS next_value,
                       sessions_count
                FROM n1
                WHERE isNull(n1.next_type)
                UNION ALL
                -- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL   AS next_value,
                       sessions_count
                FROM n2
                WHERE isNull(n2.next_type)),
     top_n AS (SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n1
               GROUP BY event_number_in_session, event_type, e_value
               LIMIT 1
               UNION ALL
               -- STEP 2
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n2
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3
               UNION ALL
               -- STEP 3
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n3
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3),
     top_n_with_next AS (SELECT n1.*
                         FROM n1
                         UNION ALL
                         SELECT n2.*
                         FROM n2
                                  INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
                             AND n2.event_type = top_n.event_type
                             AND n2.e_value = top_n.e_value)),
     others_n AS (
         -- STEP 2
         SELECT n2.*
         FROM n2
         WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 2)
         UNION ALL
         -- STEP 3
         SELECT n3.*
         FROM n3
         WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 3))
SELECT *
FROM (
     -- SELECT sum(top_n_with_next.sessions_count)
     -- FROM top_n_with_next
     -- WHERE event_number_in_session = 1
     -- --   AND isNotNull(next_value)
     --   AND (next_type, next_value) IN
     --       (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
     -- -- SELECT * FROM others_n
     -- -- SELECT * FROM n2
     -- SELECT *
     -- FROM top_n
     -- );
     -- Top to Top: valid
     SELECT top_n_with_next.*
     FROM top_n_with_next
              INNER JOIN top_n
                         ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
                             AND top_n_with_next.next_type = top_n.event_type
                             AND top_n_with_next.next_value = top_n.e_value)
     UNION ALL
     -- Top to Others: valid
     SELECT top_n_with_next.event_number_in_session,
            top_n_with_next.event_type,
            top_n_with_next.e_value,
            'OTHER' AS next_type,
            NULL    AS next_value,
            SUM(top_n_with_next.sessions_count) AS sessions_count
     FROM top_n_with_next
     WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
           (SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
     GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
     UNION ALL
     -- Top go to Drop: valid
     SELECT drop_n.event_number_in_session,
            drop_n.event_type,
            drop_n.e_value,
            drop_n.next_type,
            drop_n.next_value,
            drop_n.sessions_count
     FROM drop_n
              INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
         AND drop_n.event_type = top_n.event_type
         AND drop_n.e_value = top_n.e_value)
     ORDER BY drop_n.event_number_in_session
     UNION ALL
     -- Others go to Drop: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL    AS e_value,
            'DROP'  AS next_type,
            NULL    AS next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
     GROUP BY others_n.event_number_in_session, next_type, next_value
     UNION ALL
     -- Others go to Top: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL    AS e_value,
            others_n.next_type,
            others_n.next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
           (SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
     GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
     UNION ALL
     -- Others go to Others
     SELECT others_n.event_number_in_session,
            'OTHER'  AS event_type,
            NULL     AS e_value,
            'OTHERS' AS next_type,
            NULL     AS next_value,
            SUM(sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
           (SELECT event_number_in_session, event_type, e_value FROM top_n)
     GROUP BY others_n.event_number_in_session)
ORDER BY event_number_in_session, sessions_count DESC;

@@ -1,17 +1,16 @@
-urllib3==2.3.0
+urllib3==2.4.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.38.32
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
-clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+psycopg[pool,binary]==3.2.9
+clickhouse-connect==0.8.17
+elasticsearch==9.0.1
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
-uvicorn[standard]==0.34.0
+fastapi==0.115.12
+uvicorn[standard]==0.34.2
 python-decouple==3.8
-pydantic[email]==2.10.6
+pydantic[email]==2.11.4
 apscheduler==3.11.0

@@ -1,19 +1,18 @@
-urllib3==2.3.0
+urllib3==2.4.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.38.32
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
-clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+psycopg[pool,binary]==3.2.9
+clickhouse-connect==0.8.17
+elasticsearch==9.0.1
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
-uvicorn[standard]==0.34.0
+fastapi==0.115.12
+uvicorn[standard]==0.34.2
 python-decouple==3.8
-pydantic[email]==2.10.6
+pydantic[email]==2.11.4
 apscheduler==3.11.0

-redis==5.2.1
+redis==6.1.0

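The swap of psycopg2-binary for psycopg[pool,binary] 3.x above is an API migration, not just a version bump: pooling moves into the separate psycopg_pool package that the extra pulls in. A minimal sketch of the pool usage the new pin enables (the DSN and pool sizes here are illustrative, not taken from this changeset):

    from psycopg_pool import ConnectionPool

    # Illustrative DSN; the real one comes from the service configuration.
    pool = ConnectionPool("postgresql://localhost:5432/openreplay",
                          min_size=1, max_size=4)

    with pool.connection() as conn:  # borrows a connection, returns it on exit
        row = conn.execute("SELECT 1").fetchone()
    print(row)  # (1,)
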
@@ -4,8 +4,10 @@ from decouple import config
 from fastapi import Depends, Body, BackgroundTasks

 import schemas
-from chalicelib.core import events, projects, issues, metadata, reset_password, log_tools, \
+from chalicelib.core import projects, metadata, reset_password, log_tools, \
     announcements, weekly_report, assist, mobile, tenants, boarding, notifications, webhook, users, saved_search, tags
+from chalicelib.core.events import events
+from chalicelib.core.issues import issues
 from chalicelib.core.sourcemaps import sourcemaps
 from chalicelib.core.metrics import custom_metrics
 from chalicelib.core.alerts import alerts

@@ -8,13 +8,14 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re

 import schemas
 from chalicelib.core import assist, signup, feature_flags
+from chalicelib.core import notes
 from chalicelib.core import scope
 from chalicelib.core import tenants, users, projects, license
 from chalicelib.core import webhook
 from chalicelib.core.collaborations.collaboration_slack import Slack
 from chalicelib.core.errors import errors, errors_details
 from chalicelib.core.metrics import heatmaps
-from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
+from chalicelib.core.sessions import sessions, sessions_replay, sessions_favorite, sessions_viewed, \
     sessions_assignments, unprocessed_sessions, sessions_search
 from chalicelib.utils import captcha, smtp
+from chalicelib.utils import contextual_validators

@@ -259,8 +260,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
 def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
         Depends(contextual_validators.validate_contextual_payload),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id,
-                                           platform=context.project.platform)
+    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id)
     return {'data': data}

@@ -268,8 +268,7 @@ def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema =
 def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
         Depends(contextual_validators.validate_contextual_payload),
                        context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True,
-                                           platform=context.project.platform)
+    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True)
     return {'data': data}

@@ -475,8 +474,8 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,

 @app.get('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
 def get_note_by_id(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
-                                   user_id=context.user_id)
+    data = notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
+                          user_id=context.user_id)
     if "errors" in data:
         return data
     return {

@@ -489,8 +488,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
                 context: schemas.CurrentContext = Depends(OR_context)):
     if not sessions.session_exists(project_id=projectId, session_id=sessionId):
         return {"errors": ["Session not found"]}
-    data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
-                                 session_id=sessionId, user_id=context.user_id, data=data)
+    data = notes.create(tenant_id=context.tenant_id, project_id=projectId,
+                        session_id=sessionId, user_id=context.user_id, data=data)
     if "errors" in data.keys():
         return data
     return {

@@ -500,8 +499,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema

 @app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
 def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
-                                            session_id=sessionId, user_id=context.user_id)
+    data = notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
+                                   session_id=sessionId, user_id=context.user_id)
     if "errors" in data:
         return data
     return {

@@ -512,8 +511,8 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
 @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
 def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
               context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
-                               note_id=noteId, data=data)
+    data = notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
+                      note_id=noteId, data=data)
     if "errors" in data.keys():
         return data
     return {

@@ -523,29 +522,29 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema

 @app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
 def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.delete(project_id=projectId, note_id=noteId)
+    data = notes.delete(project_id=projectId, note_id=noteId)
     return data


 @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
 def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
                         context: schemas.CurrentContext = Depends(OR_context)):
-    return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
-                                         note_id=noteId, webhook_id=webhookId)
+    return notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
+                                note_id=noteId, webhook_id=webhookId)


 @app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
 def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
                           context: schemas.CurrentContext = Depends(OR_context)):
-    return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
-                                           note_id=noteId, webhook_id=webhookId)
+    return notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
+                                  note_id=noteId, webhook_id=webhookId)


 @app.post('/{projectId}/notes', tags=["sessions", "notes"])
 def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
                   context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
-                                                      user_id=context.user_id, data=data)
+    data = notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
+                                             user_id=context.user_id, data=data)
     if "errors" in data:
         return data
     return {'data': data}

@@ -219,6 +219,17 @@ def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSch
     return {"data": data}


+@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}/chart", tags=["card"])
+# @app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}", tags=["card"])
+def get_card_chart_for_dashboard(projectId: int, dashboardId: int, metric_id: int,
+                                 data: schemas.SavedCardSchema = Body(...),
+                                 context: schemas.CurrentContext = Depends(OR_context)):
+    data = custom_metrics.make_chart_from_card(
+        project=context.project, user_id=context.user_id, metric_id=metric_id, data=data, for_dashboard=True
+    )
+    return {"data": data}
+
+
 @app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"])
 def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):

api/routers/subs/product_analytics.py (new file, 77 lines)
@@ -0,0 +1,77 @@
+from typing import Annotated
+
+from fastapi import Body, Depends, Query
+
+import schemas
+from chalicelib.core import metadata
+from chalicelib.core.product_analytics import events, properties, autocomplete, filters
+from or_dependencies import OR_context
+from routers.base import get_routers
+from typing import Optional
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.get('/{projectId}/filters', tags=["product_analytics"])
+def get_all_filters(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return {
+        "data": {
+            "events": events.get_events(project_id=projectId),
+            "event": properties.get_all_properties(project_id=projectId),
+            "session": filters.get_sessions_filters(project_id=projectId),
+            "user": filters.get_users_filters(project_id=projectId),
+            "metadata": metadata.get_for_filters(project_id=projectId)
+        }
+    }
+
+
+@app.get('/{projectId}/events/names', tags=["product_analytics"])
+def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
+                   context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.get_events(project_id=projectId)}
+
+
+@app.get('/{projectId}/properties/search', tags=["product_analytics"])
+def get_event_properties(projectId: int, en: str = Query(default=None, description="event name"),
+                         ac: bool = Query(description="auto captured"),
+                         context: schemas.CurrentContext = Depends(OR_context)):
+    if not en or len(en) == 0:
+        return {"data": []}
+    return {"data": properties.get_event_properties(project_id=projectId, event_name=en, auto_captured=ac) \
+                    + filters.get_global_filters(project_id=projectId)}
+
+
+@app.post('/{projectId}/events/search', tags=["product_analytics"])
+def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
+                  context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.search_events(project_id=projectId, data=data)}
+
+
+@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
+def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
+                           context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}
+
+
+@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
+def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}
+
+
+@app.get('/{projectId}/events/autocomplete', tags=["autocomplete"])
+def autocomplete_events(projectId: int, q: Optional[str] = None,
+                        context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": autocomplete.search_events(project_id=projectId, q=None if not q or len(q) == 0 else q)}
+
+
+@app.get('/{projectId}/properties/autocomplete', tags=["autocomplete"])
+def autocomplete_properties(projectId: int, propertyName: str, eventName: Optional[str] = None,
+                            q: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)):
+    # Specify propertyName to get top values of that property
+    # Specify eventName&propertyName to get top values of that property for the selected event
+    return {"data": autocomplete.search_properties(project_id=projectId,
+                                                   event_name=None if not eventName \
+                                                       or len(eventName) == 0 else eventName,
+                                                   property_name=propertyName,
+                                                   q=None if not q or len(q) == 0 else q)}

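A quick way to exercise the new router; a hedged sketch of calling the properties autocomplete endpoint from a client (the base URL, project id, and omitted auth headers are illustrative and deployment-specific, while the path and the propertyName/eventName/q query parameters come from the handlers above):

    import requests

    BASE = "http://localhost:8080"  # illustrative host
    resp = requests.get(f"{BASE}/1/properties/autocomplete",
                        params={"propertyName": "$browser", "q": "chr"})
    print(resp.json()["data"])  # top values of $browser matching "chr"
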
@@ -1,15 +0,0 @@
-import schemas
-from chalicelib.core.metrics import product_anaytics2
-from fastapi import Depends
-from or_dependencies import OR_context
-from routers.base import get_routers
-
-
-public_app, app, app_apikey = get_routers()
-
-
-@app.post('/{projectId}/events/search', tags=["dashboard"])
-def search_events(projectId: int,
-                  # data: schemas.CreateDashboardSchema = Body(...),
-                  context: schemas.CurrentContext = Depends(OR_context)):
-    return product_anaytics2.search_events(project_id=projectId, data={})

@@ -1,10 +1,12 @@
-from fastapi import Body, Depends
+from typing import Annotated
+
+from fastapi import Body, Depends, Query

-import schemas
 from chalicelib.core.usability_testing import service
 from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
 from or_dependencies import OR_context
 from routers.base import get_routers
+from schemas import schemas

 public_app, app, app_apikey = get_routers()
 tags = ["usability-tests"]

@@ -77,9 +79,7 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate


 @app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
-async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
-                       live: bool = False,
-                       user_id: str = None):
+async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.UsabilityTestQuery, Query()]):
     """
     Get sessions related to a specific UT test.

@@ -87,21 +87,23 @@ async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int =
     - **test_id**: The unique identifier of the UT test.
     """

-    if live:
-        return service.ut_tests_sessions_live(projectId, test_id, page, limit)
+    if filter_query.live:
+        return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
     else:
-        return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
+        return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit,
+                                         filter_query.user_id, filter_query.live)


 @app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
-async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
+async def get_responses(projectId: int, test_id: int, task_id: int,
+                        filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
     """
     Get responses related to a specific UT test.

     - **project_id**: The unique identifier of the project.
     - **test_id**: The unique identifier of the UT test.
     """
-    return service.get_responses(test_id, task_id, page, limit, query)
+    return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query)


 @app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)

@@ -1,2 +1,4 @@
 from .schemas import *
+from .product_analytics import *
 from . import overrides as _overrides
+from .schemas import _PaginatedSchema as PaginatedSchema

@@ -3,6 +3,7 @@ from enum import Enum as _Enum
 from pydantic import BaseModel as _BaseModel
 from pydantic import ConfigDict, TypeAdapter, Field
 from pydantic.types import AnyType
+from decouple import config, Choices


 def attribute_to_camel_case(snake_str: str) -> str:

@@ -21,7 +22,9 @@ def schema_extra(schema: dict, _):
 class BaseModel(_BaseModel):
     model_config = ConfigDict(alias_generator=attribute_to_camel_case,
                               use_enum_values=True,
-                              json_schema_extra=schema_extra)
+                              json_schema_extra=schema_extra,
+                              extra=config("EXTRA_PAYLOAD_ATTRIBUTES", default="ignore",
+                                           cast=Choices(["ignore", "forbid", "allow"])))


 class Enum(_Enum):

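The added extra=config(...) line turns handling of unknown payload attributes into a deployment choice: EXTRA_PAYLOAD_ATTRIBUTES=forbid makes pydantic reject them, allow keeps them, and the default ignore silently drops them. A standalone sketch of the same mechanism (the model here is illustrative, not from the codebase):

    from pydantic import BaseModel, ConfigDict, ValidationError

    class Payload(BaseModel):
        model_config = ConfigDict(extra="forbid")  # what EXTRA_PAYLOAD_ATTRIBUTES=forbid selects
        name: str

    try:
        Payload(name="ok", unexpected=1)
    except ValidationError as e:
        print(e.errors()[0]["type"])  # extra_forbidden
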
api/schemas/product_analytics.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+from typing import Optional, List, Literal, Union, Annotated
+from pydantic import Field
+
+from .overrides import BaseModel
+from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
+    _PaginatedSchema, PropertyFilterSchema
+
+
+class EventSearchSchema(BaseModel):
+    is_event: Literal[True] = True
+    name: str = Field(...)
+    properties: Optional[EventPropertiesSchema] = Field(default=None)
+
+
+ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
+    Field(discriminator='is_event')]
+
+
+class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
+    filters: List[ProductAnalyticsGroupedFilter] = Field(...)
+    sort: str = Field(default="startTs")
+    order: SortOrderType = Field(default=SortOrderType.DESC)

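Because EventSearchSchema pins is_event to Literal[True] and PropertyFilterSchema pins it to Literal[False], pydantic can route each incoming filter dict to the right model from that single field. A minimal standalone sketch of how such a discriminated union validates a mixed list, the same way EventsSearchPayloadSchema.filters does (model names and field values here are illustrative):

    from typing import Annotated, List, Literal, Union
    from pydantic import BaseModel, Field, TypeAdapter

    class EventF(BaseModel):
        is_event: Literal[True] = True
        name: str

    class PropF(BaseModel):
        is_event: Literal[False] = False
        name: str
        value: List[str]

    Mixed = Annotated[Union[EventF, PropF], Field(discriminator="is_event")]
    items = TypeAdapter(List[Mixed]).validate_python([
        {"is_event": True, "name": "pageview"},
        {"is_event": False, "name": "$browser", "value": ["Chrome"]},
    ])
    print([type(i).__name__ for i in items])  # ['EventF', 'PropF']
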
@@ -3,12 +3,13 @@ from typing import Optional, List, Union, Literal

 from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl
 from pydantic import field_validator, model_validator, computed_field
+from pydantic import AfterValidator
 from pydantic.functional_validators import BeforeValidator

 from chalicelib.utils.TimeUTC import TimeUTC
 from .overrides import BaseModel, Enum, ORUnion
 from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
-    force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
+    force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric, check_regex


 class _GRecaptcha(BaseModel):

@@ -404,6 +405,9 @@ class EventType(str, Enum):
     REQUEST_MOBILE = "requestMobile"
     ERROR_MOBILE = "errorMobile"
     SWIPE_MOBILE = "swipeMobile"
+    EVENT = "event"
+    INCIDENT = "incident"
+    CLICK_COORDINATES = "clickCoordinates"


 class PerformanceEventType(str, Enum):

@@ -464,6 +468,7 @@ class SearchEventOperator(str, Enum):
     NOT_CONTAINS = "notContains"
     STARTS_WITH = "startsWith"
     ENDS_WITH = "endsWith"
+    PATTERN = "regex"


 class ClickEventExtraOperator(str, Enum):

@@ -503,8 +508,8 @@ class IssueType(str, Enum):
     CUSTOM = 'custom'
     JS_EXCEPTION = 'js_exception'
     MOUSE_THRASHING = 'mouse_thrashing'
-    # IOS
-    TAP_RAGE = 'tap_rage'
+    TAP_RAGE = 'tap_rage'  # IOS
+    INCIDENT = 'incident'


 class MetricFormatType(str, Enum):

@@ -535,7 +540,7 @@ class GraphqlFilterType(str, Enum):
 class RequestGraphqlFilterSchema(BaseModel):
     type: Union[FetchFilterType, GraphqlFilterType] = Field(...)
     value: List[Union[int, str]] = Field(...)
-    operator: Union[SearchEventOperator, MathOperator] = Field(...)
+    operator: Annotated[Union[SearchEventOperator, MathOperator], AfterValidator(check_regex)] = Field(...)

     @model_validator(mode="before")
     @classmethod

@@ -545,7 +550,85 @@ class RequestGraphqlFilterSchema(BaseModel):
         return values


-class SessionSearchEventSchema2(BaseModel):
+class EventPredefinedPropertyType(str, Enum):
+    TIME = "$time"
+    SOURCE = "$source"
+    DURATION_S = "$duration_s"
+    DESCRIPTION = "description"
+    AUTO_CAPTURED = "$auto_captured"
+    SDK_EDITION = "$sdk_edition"
+    SDK_VERSION = "$sdk_version"
+    DEVICE_ID = "$device_id"
+    OS = "$os"
+    OS_VERSION = "$os_version"
+    BROWSER = "$browser"
+    BROWSER_VERSION = "$browser_version"
+    DEVICE = "$device"
+    SCREEN_HEIGHT = "$screen_height"
+    SCREEN_WIDTH = "$screen_width"
+    CURRENT_URL = "$current_url"
+    INITIAL_REFERRER = "$initial_referrer"
+    REFERRING_DOMAIN = "$referring_domain"
+    REFERRER = "$referrer"
+    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
+    SEARCH_ENGINE = "$search_engine"
+    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
+    UTM_SOURCE = "utm_source"
+    UTM_MEDIUM = "utm_medium"
+    UTM_CAMPAIGN = "utm_campaign"
+    COUNTRY = "$country"
+    STATE = "$state"
+    CITY = "$city"
+    ISSUE_TYPE = "issue_type"
+    TAGS = "$tags"
+    IMPORT = "$import"
+
+
+class PropertyType(str, Enum):
+    INT = "int"
+    FLOAT = "float"
+    DATETIME = "datetime"
+    STRING = "string"
+    ARRAY = "array"
+    TUPLE = "tuple"
+    MAP = "map"
+    NESTED = "nested"
+
+
+class PropertyFilterSchema(BaseModel):
+    is_event: Literal[False] = False
+    name: Union[EventPredefinedPropertyType, str] = Field(...)
+    operator: Union[SearchEventOperator, MathOperator] = Field(...)
+    value: List[Union[int, str]] = Field(...)
+    data_type: PropertyType = Field(default=PropertyType.STRING.value)
+
+    # property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
+
+    @computed_field
+    @property
+    def is_predefined(self) -> bool:
+        return EventPredefinedPropertyType.has_value(self.name)
+
+    @model_validator(mode="after")
+    def transform_name(self):
+        if isinstance(self.name, Enum):
+            self.name = self.name.value
+        return self
+
+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+
+
+class EventPropertiesSchema(BaseModel):
+    operator: Literal["and", "or"] = Field(...)
+    filters: List[PropertyFilterSchema] = Field(...)
+
+
+class SessionSearchEventSchema(BaseModel):
     is_event: Literal[True] = True
     value: List[Union[str, int]] = Field(...)
     type: Union[EventType, PerformanceEventType] = Field(...)

@@ -553,6 +636,7 @@ class SessionSearchEventSchema2(BaseModel):
     source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
     sourceOperator: Optional[MathOperator] = Field(default=None)
     filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
+    properties: Optional[EventPropertiesSchema] = Field(default=None)

     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
     _single_to_list_values = field_validator('value', mode='before')(single_to_list)

@@ -577,23 +661,44 @@ class SessionSearchEventSchema2(BaseModel):
         elif self.type == EventType.GRAPHQL:
             assert isinstance(self.filters, List) and len(self.filters) > 0, \
                 f"filters should be defined for {EventType.GRAPHQL}"
+        elif self.type == EventType.CLICK_COORDINATES:
+            assert isinstance(self.value, List) \
+                   and (len(self.value) == 0 or len(self.value) == 2 or len(self.value) == 4), \
+                f"value should be [x,y] or [x1,x2,y1,y2] for {EventType.CLICK_COORDINATES}"

         if isinstance(self.operator, ClickEventExtraOperator):
             assert self.type == EventType.CLICK, \
                 f"operator:{self.operator} is only available for event-type: {EventType.CLICK}"
         return self

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 class SessionSearchFilterSchema(BaseModel):
     is_event: Literal[False] = False
     value: List[Union[IssueType, PlatformType, int, str]] = Field(default_factory=list)
-    type: FilterType = Field(...)
+    type: Union[FilterType, str] = Field(...)
     operator: Union[SearchEventOperator, MathOperator] = Field(...)
     source: Optional[Union[ErrorSource, str]] = Field(default=None)
+    # used for global-properties
+    data_type: Optional[PropertyType] = Field(default=PropertyType.STRING.value)
+    # used to tell if the current filter is predefined or a user-property having the same name,
+    # i.e. in case the user sends a property with the same name as a predefined property
+    auto_captured: Optional[bool] = Field(default=True)

     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
     _single_to_list_values = field_validator('value', mode='before')(single_to_list)

+    @computed_field
+    @property
+    def is_predefined(self) -> bool:
+        return FilterType.has_value(self.type)
+
     @model_validator(mode="before")
     @classmethod
     def _transform_data(cls, values):

@@ -640,6 +745,13 @@ class SessionSearchFilterSchema(BaseModel):

         return self

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 class _PaginatedSchema(BaseModel):
     limit: int = Field(default=200, gt=0, le=200)

@@ -660,12 +772,12 @@ def add_missing_is_event(values: dict):


 # this type is created to allow mixing events&filters and specifying a discriminator
-GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
+GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema],
     Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]


 class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
-    events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
+    events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True)
     filters: List[GroupedFilterType] = Field(default_factory=list)
     sort: str = Field(default="startTs")
     order: SortOrderType = Field(default=SortOrderType.DESC)

@@ -690,6 +802,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
     def add_missing_attributes(cls, values):
         # in case isEvent is wrong:
         for f in values.get("filters") or []:
+            if f.get("type") is None:
+                continue
             if EventType.has_value(f["type"]) and not f.get("isEvent"):
                 f["isEvent"] = True
             elif FilterType.has_value(f["type"]) and f.get("isEvent"):

@@ -715,6 +829,15 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
                 f["value"] = vals
         return values

+    @model_validator(mode="after")
+    def check_pa_event_filter(self):
+        for v in self.filters + self.events:
+            if v.type == EventType.EVENT:
+                assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
+                    f"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
+                assert len(v.value) == 1, "value must have a single value for EVENT type"
+        return self
+
     @model_validator(mode="after")
     def split_filters_events(self):
         n_filters = []

@@ -795,6 +918,13 @@ class PathAnalysisSubFilterSchema(BaseModel):
         values["isEvent"] = True
         return values

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 class _ProductAnalyticsFilter(BaseModel):
     is_event: Literal[False] = False

@@ -805,6 +935,13 @@ class _ProductAnalyticsFilter(BaseModel):

     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 class _ProductAnalyticsEventFilter(BaseModel):
     is_event: Literal[True] = True

@@ -815,6 +952,13 @@ class _ProductAnalyticsEventFilter(BaseModel):

     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 # this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
 ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter],

@@ -909,11 +1053,16 @@ class MetricOfPathAnalysis(str, Enum):
     session_count = MetricOfTimeseries.SESSION_COUNT.value


 # class CardSessionsSchema(SessionsSearchPayloadSchema):
 class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
     startTimestamp: int = Field(default=TimeUTC.now(-7))
     endTimestamp: int = Field(default=TimeUTC.now())
     density: int = Field(default=7, ge=1, le=200)
+    # we need metric_type&metric_of in the payload of sessions search
+    # because the API will return all sessions if the card is not identified
+    # example: table of requests contains only sessions that have a request,
+    # but drill-down doesn't take that into consideration
+    metric_type: MetricType = Field(...)
+    metric_of: Any
     series: List[CardSeriesSchema] = Field(default_factory=list)

     # events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)

@@ -960,36 +1109,6 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):

         return self

-    # We don't need this as the UI is expecting filters to override the full series' filters
-    # @model_validator(mode="after")
-    # def __merge_out_filters_with_series(self):
-    #     for f in self.filters:
-    #         for s in self.series:
-    #             found = False
-    #
-    #             if f.is_event:
-    #                 sub = s.filter.events
-    #             else:
-    #                 sub = s.filter.filters
-    #
-    #             for e in sub:
-    #                 if f.type == e.type and f.operator == e.operator:
-    #                     found = True
-    #                     if f.is_event:
-    #                         # If extra event: append value
-    #                         for v in f.value:
-    #                             if v not in e.value:
-    #                                 e.value.append(v)
-    #                     else:
-    #                         # If extra filter: override value
-    #                         e.value = f.value
-    #             if not found:
-    #                 sub.append(f)
-    #
-    #     self.filters = []
-    #
-    #     return self
-
     # UI is expecting filters to override the full series' filters
     @model_validator(mode="after")
     def __override_series_filters_with_outer_filters(self):

@@ -1008,6 +1127,11 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
         return self


+class SavedCardSchema(CardSessionsSchema):
+    metric_type: Optional[MetricType] = Field(default=None)
+    metric_of: Optional[Any] = Field(default=None)
+
+
 class CardConfigSchema(BaseModel):
     col: Optional[int] = Field(default=None)
     row: Optional[int] = Field(default=2)

@@ -1021,8 +1145,6 @@ class __CardSchema(CardSessionsSchema):
     thumbnail: Optional[str] = Field(default=None)
     metric_format: Optional[MetricFormatType] = Field(default=None)
     view_type: Any
-    metric_type: MetricType = Field(...)
-    metric_of: Any
     metric_value: List[IssueType] = Field(default_factory=list)
     # This is used to save the selected session for heatmaps
     session_id: Optional[int] = Field(default=None)

@@ -1060,6 +1182,16 @@ class CardTable(__CardSchema):
         values["metricValue"] = []
         return values

+    @model_validator(mode="after")
+    def __enforce_AND_operator(self):
+        self.metric_of = MetricOfTable(self.metric_of)
+        if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
+                              MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
+            for s in self.series:
+                if s.filter is not None:
+                    s.filter.events_order = SearchEventOrder.AND
+        return self
+
     @model_validator(mode="after")
     def __transform(self):
         self.metric_of = MetricOfTable(self.metric_of)

@@ -1135,7 +1267,7 @@ class CardPathAnalysis(__CardSchema):
     view_type: MetricOtherViewType = Field(...)
     metric_value: List[ProductAnalyticsSelectedEventType] = Field(default_factory=list)
     density: int = Field(default=4, ge=2, le=10)
-    rows: int = Field(default=3, ge=1, le=10)
+    rows: int = Field(default=5, ge=1, le=10)

     start_type: Literal["start", "end"] = Field(default="start")
     start_point: List[PathAnalysisSubFilterSchema] = Field(default_factory=list)

@@ -1279,6 +1411,13 @@ class LiveSessionSearchFilterSchema(BaseModel):
         assert len(self.source) > 0, "source should not be empty for METADATA type"
         return self

+    @model_validator(mode='after')
+    def _check_regex_value(self):
+        if self.operator == SearchEventOperator.PATTERN:
+            for v in self.value:
+                check_regex(v)
+        return self
+

 class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
     filters: List[LiveSessionSearchFilterSchema] = Field([])

@@ -1404,8 +1543,8 @@ class MetricSearchSchema(_PaginatedSchema):
     mine_only: bool = Field(default=False)


-class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
-    type: Literal[EventType.LOCATION] = Field(...)
+class _HeatMapSearchEventRaw(SessionSearchEventSchema):
+    type: Literal[EventType.LOCATION, EventType.CLICK_COORDINATES] = Field(...)


 class HeatMapSessionsSearch(SessionsSearchPayloadSchema):

@@ -1529,3 +1668,34 @@ class TagCreate(TagUpdate):

 class ScopeSchema(BaseModel):
     scope: int = Field(default=1, ge=1, le=2)
+
+
+class SessionModel(BaseModel):
+    duration: int
+    errorsCount: int
+    eventsCount: int
+    issueScore: int
+    issueTypes: List[IssueType] = Field(default=[])
+    metadata: dict = Field(default={})
+    pagesCount: int
+    platform: str
+    projectId: int
+    sessionId: str
+    startTs: int
+    timezone: Optional[str]
+    userAnonymousId: Optional[str]
+    userBrowser: str
+    userCity: str
+    userCountry: str
+    userDevice: Optional[str]
+    userDeviceType: str
+    userId: Optional[str]
+    userOs: str
+    userState: str
+    userUuid: str
+    viewed: bool = Field(default=False)
+
+
+class UsabilityTestQuery(_PaginatedSchema):
+    live: bool = Field(default=False)
+    user_id: Optional[str] = Field(default=None)

@@ -1,10 +1,11 @@
+import re
 from typing import Union, Any, Type

 from pydantic import ValidationInfo

 from .overrides import Enum

-NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#]*$"
+NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#,']*$"


 def transform_email(email: str) -> str:

@@ -57,3 +58,17 @@ def check_alphanumeric(v: str, info: ValidationInfo) -> str:
     is_alphanumeric = v.replace(' ', '').isalnum()
     assert is_alphanumeric, f'{info.field_name} must be alphanumeric'
     return v
+
+
+def check_regex(v: str) -> str:
+    assert v is not None, "Regex is null"
+    assert isinstance(v, str), "Regex value must be a string"
+    assert len(v) > 0, "Regex is empty"
+    is_valid = None
+    try:
+        re.compile(v)
+    except re.error as exc:
+        is_valid = f"Invalid regex: {exc} (at position {exc.pos})"
+
+    assert is_valid is None, is_valid
+    return v

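check_regex is used both as an AfterValidator and as a plain guard inside the _check_regex_value model validators; a small sketch of its two outcomes (the input patterns are illustrative):

    check_regex(r"^/en/deployment/.*$")  # valid pattern: returned unchanged

    try:
        check_regex("(unclosed")  # invalid pattern: the assertion carries the re.error detail
    except AssertionError as e:
        print(e)  # e.g. "Invalid regex: missing ), unterminated subpattern at position 0 (at position 0)"
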
assist-server/build.sh (new file, 61 lines)
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
+
+ARCH=${ARCH:-amd64}
+git_sha=$(git rev-parse --short HEAD)
+image_tag=${IMAGE_TAG:-$git_sha}
+check_prereq() {
+    which docker || {
+        echo "Docker not installed, please install docker."
+        exit 1
+    }
+}
+source ../scripts/lib/_docker.sh
+
+[[ $PATCH -eq 1 ]] && {
+    image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    image_tag="${image_tag}-ee"
+}
+update_helm_release() {
+    chart=$1
+    HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    # Update the chart version
+    sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Update image tags
+    sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Commit the changes
+    git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    git commit -m "chore(helm): Updating $chart image release"
+}
+
+function build_api() {
+    destination="_assist-server_ee"
+    [[ -d ../${destination} ]] && {
+        echo "Removing previous build cache"
+        rm -rf ../${destination}
+    }
+    cp -R ../assist-server ../${destination}
+    cd ../${destination} || exit 1
+    cp -rf ../ee/assist-server/* ./
+
+    docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist-server:${image_tag} .
+
+    cd ../assist-server || exit 1
+    rm -rf ../${destination}
+    [[ $PUSH_IMAGE -eq 1 ]] && {
+        docker push ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
+        docker tag ${DOCKER_REPO:-'local'}/assist-server:${image_tag} ${DOCKER_REPO:-'local'}/assist-server:latest
+        docker push ${DOCKER_REPO:-'local'}/assist-server:latest
+    }
+    [[ $SIGN_IMAGE -eq 1 ]] && {
+        cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
+    }
+    echo "build completed for assist-server"
+}
+
+check_prereq
+build_api $1
+if [[ $PATCH -eq 1 ]]; then
+    update_helm_release assist-server
+fi

@@ -19,14 +19,16 @@ const EVENTS_DEFINITION = {
     }
 };
 EVENTS_DEFINITION.emit = {
-    NEW_AGENT: "NEW_AGENT",
-    NO_AGENTS: "NO_AGENT",
-    AGENT_DISCONNECT: "AGENT_DISCONNECTED",
-    AGENTS_CONNECTED: "AGENTS_CONNECTED",
-    NO_SESSIONS: "SESSION_DISCONNECTED",
-    SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
-    SESSION_RECONNECTED: "SESSION_RECONNECTED",
-    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
+    NEW_AGENT: "NEW_AGENT",
+    NO_AGENTS: "NO_AGENT",
+    AGENT_DISCONNECT: "AGENT_DISCONNECTED",
+    AGENTS_CONNECTED: "AGENTS_CONNECTED",
+    AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
+    NO_SESSIONS: "SESSION_DISCONNECTED",
+    SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
+    SESSION_RECONNECTED: "SESSION_RECONNECTED",
+    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
+    WEBRTC_CONFIG: "WEBRTC_CONFIG",
 };

 const BASE_sessionInfo = {

@@ -27,9 +27,14 @@ const respond = function (req, res, data) {
         res.setHeader('Content-Type', 'application/json');
         res.end(JSON.stringify(result));
     } else {
-        res.cork(() => {
-            res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
-        });
+        if (!res.aborted) {
+            res.cork(() => {
+                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
+            });
+        } else {
+            logger.debug("response aborted");
+            return;
+        }
     }
     const duration = performance.now() - req.startTs;
     IncreaseTotalRequests();

@@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
 };

 async function getRoomData(io, roomID) {
-    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
+    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
     const connected_sockets = await io.in(roomID).fetchSockets();
     if (connected_sockets.length > 0) {
         for (let socket of connected_sockets) {

@@ -52,13 +52,19 @@ async function getRoomData(io, roomID) {
             } else {
                 agentsCount++;
                 agentIDs.push(socket.id);
+                agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
+                if (socket.handshake.query.config !== undefined) {
+                    config = socket.handshake.query.config;
+                }
             }
         }
     } else {
         tabsCount = -1;
         agentsCount = -1;
+        agentInfos = [];
+        agentIDs = [];
     }
-    return {tabsCount, agentsCount, tabIDs, agentIDs};
+    return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
 }

 function processNewSocket(socket) {

@@ -78,7 +84,7 @@ async function onConnect(socket) {
     IncreaseOnlineConnections(socket.handshake.query.identity);

     const io = getServer();
-    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
+    const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);

     if (socket.handshake.query.identity === IDENTITIES.session) {
         // Check if a session with the same tabID is already connected; if so, refuse the new connection

@@ -100,7 +106,9 @@ async function onConnect(socket) {
             // Inform all connected agents about reconnected session
             if (agentsCount > 0) {
                 logger.debug(`notifying new session about agent-existence`);
+                io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
+                io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
                 socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
             }
         } else if (tabsCount <= 0) {

@ -118,7 +126,8 @@ async function onConnect(socket) {
|
|||
// Stats
|
||||
startAssist(socket, socket.handshake.query.agentID);
|
||||
}
|
||||
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
|
||||
io.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, socket.handshake.query.config);
|
||||
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, { ...socket.handshake.query.agentInfo });
|
||||
}
|
||||
|
||||
// Set disconnect handler
|
||||
|
|
|
|||
backend/Makefile (new file, 30 lines)

@@ -0,0 +1,30 @@
ee ?= "false" # true to build ee
app ?= "" # app name, default all
arch ?= "amd64" # default amd64
docker_runtime ?= "docker" # default docker runtime

.PHONY: help
help: ## Prints help for targets with comments
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

##@ Docker

.PHONY: build
build: ## Build the backend. ee=true for ee build. app=app name for only one app. Default build all apps.
	ARCH=$(arch) DOCKER_RUNTIME=$(docker_runtime) bash build.sh $(ee) $(app)

##@ Local Dev

.PHONY: scan
scan: ## Scan the backend
	@trivy fs -q .

.PHONY: update
update: ## Update the backend dependencies
	@echo Updating dependencies
	@go get -u -v ./...
	@go mod tidy

run: ## Run the backend. app=app name for app to run
	@if [ $(app) == "" ]; then echo "Error: app parameter is required. Usage: make run app=<app_name>"; exit 1; fi
	@go run "cmd/$(app)/main.go"
@@ -2,44 +2,71 @@ package main

import (
	"context"
	"os"
	"os/signal"
	"syscall"

	analyticsConfig "openreplay/backend/internal/config/analytics"
	"openreplay/backend/pkg/analytics"
	"openreplay/backend/pkg/db/postgres/pool"
	"openreplay/backend/pkg/logger"
	"openreplay/backend/pkg/metrics"
	"openreplay/backend/pkg/metrics/database"
	"openreplay/backend/pkg/metrics/web"
	"openreplay/backend/pkg/server"
	"openreplay/backend/pkg/server/api"
)

func main() {
	ctx := context.Background()
	log := logger.New()
	cfg := analyticsConfig.New(log)
	// Observability
	webMetrics := web.New("analytics")
	dbMetrics := database.New("analytics")
	metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
	log.Info(ctx, "Cacher service started")

	pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
	if err != nil {
		log.Fatal(ctx, "can't init postgres connection: %s", err)
	sigchan := make(chan os.Signal, 1)
	signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)

	for {
		select {
		case sig := <-sigchan:
			log.Error(ctx, "Caught signal %v: terminating", sig)
			os.Exit(0)
		}
	}
	defer pgConn.Close()

	builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
	if err != nil {
		log.Fatal(ctx, "can't init services: %s", err)
	}

	router, err := api.NewRouter(&cfg.HTTP, log)
	if err != nil {
		log.Fatal(ctx, "failed while creating router: %s", err)
	}
	router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
	router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)

	server.Run(ctx, log, &cfg.HTTP, router)
}

//
//import (
//	"context"
//
//	analyticsConfig "openreplay/backend/internal/config/analytics"
//	"openreplay/backend/pkg/analytics"
//	"openreplay/backend/pkg/db/postgres/pool"
//	"openreplay/backend/pkg/logger"
//	"openreplay/backend/pkg/metrics"
//	"openreplay/backend/pkg/metrics/database"
//	"openreplay/backend/pkg/metrics/web"
//	"openreplay/backend/pkg/server"
//	"openreplay/backend/pkg/server/api"
//)
//
//func main() {
//	ctx := context.Background()
//	log := logger.New()
//	cfg := analyticsConfig.New(log)
//	// Observability
//	webMetrics := web.New("analytics")
//	dbMetrics := database.New("analytics")
//	metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
//
//	pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
//	if err != nil {
//		log.Fatal(ctx, "can't init postgres connection: %s", err)
//	}
//	defer pgConn.Close()
//
//	builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
//	if err != nil {
//		log.Fatal(ctx, "can't init services: %s", err)
//	}
//
//	router, err := api.NewRouter(&cfg.HTTP, log)
//	if err != nil {
//		log.Fatal(ctx, "failed while creating router: %s", err)
//	}
//	router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
//	router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
//
//	server.Run(ctx, log, &cfg.HTTP, router)
//}
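The rewritten entrypoint above swaps the service wiring for a bare signal loop (the previous body survives as the commented-out block). For reference, a minimal standalone sketch of the same shutdown pattern, using only the standard library rather than OpenReplay's logger:

package main

import (
	"log"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	// Buffered channel so a signal delivered before the receive below is not dropped.
	sigchan := make(chan os.Signal, 1)
	signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)

	// Block until SIGINT or SIGTERM arrives, then exit.
	sig := <-sigchan
	log.Printf("caught signal %v: terminating", sig)
	os.Exit(0)
}

Worth noting: os.Exit terminates the process immediately and skips deferred calls, so a defer pgConn.Close() placed after such a loop never runs.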
@@ -66,11 +66,11 @@ func main() {
		messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
		messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
		messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue,
		messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
		messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
		messages.MsgMouseClickDeprecated, messages.MsgSetPageLocation, messages.MsgSetPageLocationDeprecated,
		messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
		messages.MsgPageEvent, messages.MsgPageEventDeprecated, messages.MsgMouseThrashing, messages.MsgInputChange,
		messages.MsgUnbindNodes, messages.MsgCanvasNode, messages.MsgTagTrigger,
		messages.MsgUnbindNodes, messages.MsgCanvasNode, messages.MsgTagTrigger, messages.MsgIncident,
		// Mobile messages
		messages.MsgMobileSessionStart, messages.MsgMobileSessionEnd, messages.MsgMobileUserID, messages.MsgMobileUserAnonymousID,
		messages.MsgMobileMetadata, messages.MsgMobileEvent, messages.MsgMobileNetworkCall,
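The hunk above widens the set of message type IDs this consumer handles, adding messages.MsgFetch and messages.MsgIncident. Conceptually the list acts as a whitelist checked before a raw message is decoded; a sketch of that idea with placeholder IDs (not OpenReplay's actual constants or API):

package main

import "fmt"

// wantedTypes holds the message type IDs the consumer decodes;
// the IDs here are placeholders, not OpenReplay's real values.
var wantedTypes = map[int]struct{}{
	21:  {}, // hypothetical: network request
	125: {}, // hypothetical: incident
}

// shouldConsume reports whether a message type is on the whitelist;
// anything else can be skipped without paying the decode cost.
func shouldConsume(typeID int) bool {
	_, ok := wantedTypes[typeID]
	return ok
}

func main() {
	fmt.Println(shouldConsume(125)) // true
	fmt.Println(shouldConsume(7))   // false
}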
@@ -100,6 +100,7 @@ func main() {
		// Process assets
		if msg.TypeID() == messages.MsgSetNodeAttributeURLBased ||
			msg.TypeID() == messages.MsgSetCSSDataURLBased ||
			msg.TypeID() == messages.MsgCSSInsertRuleURLBased ||
			msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased ||
			msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased {
			m := msg.Decode()
@@ -1,52 +1,54 @@
module openreplay/backend

go 1.23
go 1.23.0

toolchain go1.23.1

require (
	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
	github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0
	github.com/ClickHouse/clickhouse-go/v2 v2.32.1
	github.com/DataDog/datadog-api-client-go/v2 v2.34.0
	github.com/ClickHouse/clickhouse-go/v2 v2.34.0
	github.com/DataDog/datadog-api-client-go/v2 v2.37.1
	github.com/Masterminds/semver v1.5.0
	github.com/andybalholm/brotli v1.1.1
	github.com/aws/aws-sdk-go v1.55.6
	github.com/btcsuite/btcutil v1.0.2
	github.com/confluentinc/confluent-kafka-go/v2 v2.8.0
	github.com/confluentinc/confluent-kafka-go/v2 v2.10.0
	github.com/docker/distribution v2.8.3+incompatible
	github.com/elastic/go-elasticsearch/v7 v7.17.10
	github.com/elastic/go-elasticsearch/v8 v8.17.0
	github.com/getsentry/sentry-go v0.31.1
	github.com/go-playground/validator/v10 v10.24.0
	github.com/elastic/go-elasticsearch/v8 v8.18.0
	github.com/getsentry/sentry-go v0.32.0
	github.com/go-playground/validator/v10 v10.26.0
	github.com/go-redis/redis v6.15.9+incompatible
	github.com/golang-jwt/jwt/v5 v5.2.1
	github.com/golang-jwt/jwt/v5 v5.2.2
	github.com/google/uuid v1.6.0
	github.com/gorilla/mux v1.8.1
	github.com/jackc/pgconn v1.14.3
	github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438
	github.com/jackc/pgtype v1.14.4
	github.com/jackc/pgx/v4 v4.18.3
	github.com/klauspost/compress v1.17.11
	github.com/klauspost/compress v1.18.0
	github.com/klauspost/pgzip v1.2.6
	github.com/lib/pq v1.10.9
	github.com/oschwald/maxminddb-golang v1.13.1
	github.com/pkg/errors v0.9.1
	github.com/prometheus/client_golang v1.20.5
	github.com/prometheus/client_golang v1.22.0
	github.com/rs/xid v1.6.0
	github.com/sethvargo/go-envconfig v1.1.0
	github.com/sethvargo/go-envconfig v1.2.0
	github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
	github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff
	github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc
	go.uber.org/zap v1.27.0
	golang.org/x/net v0.35.0
	golang.org/x/net v0.39.0
)

require (
	github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
	github.com/ClickHouse/ch-go v0.65.0 // indirect
	github.com/DataDog/zstd v1.5.6 // indirect
	github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
	github.com/ClickHouse/ch-go v0.65.1 // indirect
	github.com/DataDog/zstd v1.5.7 // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
	github.com/elastic/elastic-transport-go/v8 v8.6.0 // indirect
	github.com/gabriel-vasile/mimetype v1.4.8 // indirect
	github.com/elastic/elastic-transport-go/v8 v8.7.0 // indirect
	github.com/gabriel-vasile/mimetype v1.4.9 // indirect
	github.com/go-faster/city v1.0.1 // indirect
	github.com/go-faster/errors v0.7.1 // indirect
	github.com/go-logr/logr v1.4.2 // indirect

@@ -66,23 +68,23 @@ require (
	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
	github.com/paulmach/orb v0.11.1 // indirect
	github.com/pierrec/lz4/v4 v4.1.22 // indirect
	github.com/prometheus/client_model v0.6.1 // indirect
	github.com/prometheus/common v0.62.0 // indirect
	github.com/prometheus/procfs v0.15.1 // indirect
	github.com/prometheus/client_model v0.6.2 // indirect
	github.com/prometheus/common v0.63.0 // indirect
	github.com/prometheus/procfs v0.16.0 // indirect
	github.com/segmentio/asm v1.2.0 // indirect
	github.com/shopspring/decimal v1.4.0 // indirect
	github.com/sirupsen/logrus v1.9.3 // indirect
	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
	go.opentelemetry.io/otel v1.34.0 // indirect
	go.opentelemetry.io/otel/metric v1.34.0 // indirect
	go.opentelemetry.io/otel/trace v1.34.0 // indirect
	go.opentelemetry.io/otel v1.35.0 // indirect
	go.opentelemetry.io/otel/metric v1.35.0 // indirect
	go.opentelemetry.io/otel/trace v1.35.0 // indirect
	go.uber.org/multierr v1.11.0 // indirect
	golang.org/x/crypto v0.33.0 // indirect
	golang.org/x/oauth2 v0.25.0 // indirect
	golang.org/x/sys v0.30.0 // indirect
	golang.org/x/text v0.22.0 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 // indirect
	google.golang.org/protobuf v1.36.4 // indirect
	golang.org/x/crypto v0.37.0 // indirect
	golang.org/x/oauth2 v0.29.0 // indirect
	golang.org/x/sys v0.32.0 // indirect
	golang.org/x/text v0.24.0 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e // indirect
	google.golang.org/protobuf v1.36.6 // indirect
	gopkg.in/yaml.v2 v2.4.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
@@ -6,10 +6,17 @@ github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkk
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0 h1:Bg8m3nq/X1DeePkAbCfb6ml6F3F0IunEhE8TMh+lY48=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 h1:UXT0o77lXQrikd1kgwIPQOUect7EoR/+sbP4wQKdzxM=

@@ -18,19 +25,28 @@ github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOEl
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
github.com/ClickHouse/ch-go v0.65.0 h1:vZAXfTQliuNNefqkPDewX3kgRxN6Q4vUENnnY+ynTRY=
github.com/ClickHouse/ch-go v0.65.0/go.mod h1:tCM0XEH5oWngoi9Iu/8+tjPBo04I/FxNIffpdjtwx3k=
github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU=
github.com/ClickHouse/ch-go v0.65.1/go.mod h1:bsodgURwmrkvkBe5jw1qnGDgyITsYErfONKAHn05nv4=
github.com/ClickHouse/clickhouse-go/v2 v2.30.1 h1:Dy0n0l+cMbPXs8hFkeeWGaPKrB+MDByUNQBSmRO3W6k=
github.com/ClickHouse/clickhouse-go/v2 v2.30.1/go.mod h1:szk8BMoQV/NgHXZ20ZbwDyvPWmpfhRKjFkc6wzASGxM=
github.com/ClickHouse/clickhouse-go/v2 v2.32.1 h1:RLhkxA6iH/bLTXeDtEj/u4yUx9Q03Y95P+cjHScQK78=
github.com/ClickHouse/clickhouse-go/v2 v2.32.1/go.mod h1:YtaiIFlHCGNPbOpAvFGYobtcVnmgYvD/WmzitixxWYc=
github.com/ClickHouse/clickhouse-go/v2 v2.34.0 h1:Y4rqkdrRHgExvC4o/NTbLdY5LFQ3LHS77/RNFxFX3Co=
github.com/ClickHouse/clickhouse-go/v2 v2.34.0/go.mod h1:yioSINoRLVZkLyDzdMXPLRIqhDvel8iLBlwh6Iefso8=
github.com/DataDog/datadog-api-client-go/v2 v2.34.0 h1:0VVmv8uZg8vdBuEpiF2nBGUezl2QITrxdEsLgh38j8M=
github.com/DataDog/datadog-api-client-go/v2 v2.34.0/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
github.com/DataDog/datadog-api-client-go/v2 v2.37.1 h1:weZhrGMO//sMEoSKWngoSQwMp4zBSlEX4p3/YWy9ltw=
github.com/DataDog/datadog-api-client-go/v2 v2.37.1/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
github.com/DataDog/zstd v1.5.6 h1:LbEglqepa/ipmmQJUDnSsfvA8e8IStVcGaFWDuxvGOY=
github.com/DataDog/zstd v1.5.6/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
github.com/DataDog/zstd v1.5.7 h1:ybO8RBeh29qrxIhCA9E8gKY6xfONU9T6G6aP9DTKfLE=
github.com/DataDog/zstd v1.5.7/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=

@@ -97,6 +113,8 @@ github.com/compose-spec/compose-go/v2 v2.1.3 h1:bD67uqLuL/XgkAK6ir3xZvNLFPxPScEi
github.com/compose-spec/compose-go/v2 v2.1.3/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0 h1:0HlcSNWg4LpLA9nIjzUMIqWHI+w0S68UN7alXAc3TeA=
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0 h1:TK5CH5RbIj/aVfmJFEsDUT6vD2izac2zmA5BUfAOxC0=
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=

@@ -148,10 +166,14 @@ github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJ
github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg=
github.com/elastic/elastic-transport-go/v8 v8.6.0 h1:Y2S/FBjx1LlCv5m6pWAF2kDJAHoSjSRSJCApolgfthA=
github.com/elastic/elastic-transport-go/v8 v8.6.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
github.com/elastic/elastic-transport-go/v8 v8.7.0 h1:OgTneVuXP2uip4BA658Xi6Hfw+PeIOod2rY3GVMGoVE=
github.com/elastic/elastic-transport-go/v8 v8.7.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
github.com/elastic/go-elasticsearch/v7 v7.17.10 h1:TCQ8i4PmIJuBunvBS6bwT2ybzVFxxUhhltAs3Gyu1yo=
github.com/elastic/go-elasticsearch/v7 v7.17.10/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
github.com/elastic/go-elasticsearch/v8 v8.17.0 h1:e9cWksE/Fr7urDRmGPGp47Nsp4/mvNOrU8As1l2HQQ0=
github.com/elastic/go-elasticsearch/v8 v8.17.0/go.mod h1:lGMlgKIbYoRvay3xWBeKahAiJOgmFDsjZC39nmO3H64=
github.com/elastic/go-elasticsearch/v8 v8.18.0 h1:ANNq1h7DEiPUaALb8+5w3baQzaS08WfHV0DNzp0VG4M=
github.com/elastic/go-elasticsearch/v8 v8.18.0/go.mod h1:WLqwXsJmQoYkoA9JBFeEwPkQhCfAZuUvfpdU/NvSSf0=
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=

@@ -163,8 +185,12 @@ github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM4
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4=
github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
github.com/getsentry/sentry-go v0.32.0 h1:YKs+//QmwE3DcYtfKRH8/KyOOF/I6Qnx7qYGNHCGmCY=
github.com/getsentry/sentry-go v0.32.0/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=

@@ -194,6 +220,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -211,6 +239,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=

@@ -222,6 +252,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=

@@ -328,6 +359,8 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=

@@ -441,12 +474,20 @@ github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM=
github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg=
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o=
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=

@@ -468,6 +509,8 @@ github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQ
github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
github.com/sethvargo/go-envconfig v1.2.0 h1:q3XkOZWkC+G1sMLCrw9oPGTjYexygLOXDmGUit1ti8Q=
github.com/sethvargo/go-envconfig v1.2.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=

@@ -528,6 +571,8 @@ github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Q
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff h1:NwMEGwb7JJ8wPjT8OPKP5hO1Xz6AQ7Z00+GLSJfW21s=
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E=
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc h1:reH9QQKGFOq39MYOvU9+SYrB8uzXtWNo51fWK3g0gGc=
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc/go.mod h1:gwANdYmo9R8LLwGnyDFWK2PMsaXXX2HhAvCnb/UhZsM=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=

@@ -557,6 +602,8 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=

@@ -571,12 +618,16 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkE
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=

@@ -617,6 +668,10 @@ golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=

@@ -643,8 +698,14 @@ golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98=
golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

@@ -679,6 +740,10 @@ golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=

@@ -700,6 +765,10 @@ golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

@@ -727,12 +796,18 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 h1:J1H9f+LEdWAfHcez/4cvaVBox7cOYT+IU6rgqj5x++8=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287/go.mod h1:8BS3B93F/U1juMFq9+EDk+qOT5CO1R9IzXxG3PTqiRk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y=
gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -2,7 +2,7 @@ package datasaver

import (
	"context"

	"encoding/json"
	"openreplay/backend/internal/config/db"
	"openreplay/backend/pkg/db/clickhouse"
	"openreplay/backend/pkg/db/postgres"

@@ -50,10 +50,6 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
}

func (s *saverImpl) Handle(msg Message) {
	if msg.TypeID() == MsgCustomEvent {
		defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
	}

	var (
		sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
		session *sessions.Session

@@ -69,6 +65,23 @@ func (s *saverImpl) Handle(msg Message) {
		return
	}

	if msg.TypeID() == MsgCustomEvent {
		m := msg.(*CustomEvent)
		// Try to parse the custom event payload as JSON and extract the or_timestamp field
		type CustomEventPayload struct {
			CustomTimestamp uint64 `json:"or_timestamp"`
		}
		customPayload := &CustomEventPayload{}
		if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
			if customPayload.CustomTimestamp >= session.Timestamp {
				s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
				msg.Meta().Timestamp = customPayload.CustomTimestamp
				s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
			}
		}
		defer s.Handle(types.WrapCustomEvent(m))
	}

	if IsMobileType(msg.TypeID()) {
		if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
			if !postgres.IsPkeyViolation(err) {
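The new custom-event branch above parses the event payload as JSON and, when it carries an or_timestamp no earlier than the session start, overwrites the message timestamp with it. A standalone reproduction of just that parsing rule, with made-up payload and session values:

package main

import (
	"encoding/json"
	"fmt"
)

// customEventPayload mirrors the struct in the handler: only the
// or_timestamp field of the payload matters here.
type customEventPayload struct {
	CustomTimestamp uint64 `json:"or_timestamp"`
}

func main() {
	sessionStart := uint64(1700000000000) // made-up session start (ms)
	payload := `{"name":"checkout","or_timestamp":1700000123456}`

	p := &customEventPayload{}
	// Malformed payloads are ignored and the original timestamp is kept.
	if err := json.Unmarshal([]byte(payload), p); err == nil && p.CustomTimestamp >= sessionStart {
		fmt.Println("override message timestamp:", p.CustomTimestamp)
	}
}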
@@ -2,7 +2,6 @@ package datasaver

import (
	"context"

	"openreplay/backend/pkg/db/postgres"
	"openreplay/backend/pkg/db/types"
	"openreplay/backend/pkg/messages"

@@ -141,6 +140,11 @@ func (s *saverImpl) handleWebMessage(sessCtx context.Context, session *sessions.
			return err
		}
		return s.ch.InsertWebPerformanceTrackAggr(session, m)
	case *messages.Incident:
		if err := s.pg.InsertIncident(session, m); err != nil {
			return err
		}
		return s.ch.InsertIncident(session, m)
	}
	return nil
}
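The new Incident case follows the same dual-write convention as the neighboring cases: insert into Postgres first and mirror to ClickHouse only on success. A schematic sketch of that control flow; the interface and signatures here are assumptions, not the actual openreplay store types:

package main

import (
	"errors"
	"fmt"
)

// incidentStore is an assumed interface standing in for the pg/ch stores.
type incidentStore interface {
	InsertIncident(sessionID uint64, payload string) error
}

// saveIncident mirrors the handler: if the Postgres insert fails, the
// error is returned and ClickHouse is never written.
func saveIncident(pg, ch incidentStore, sessionID uint64, payload string) error {
	if err := pg.InsertIncident(sessionID, payload); err != nil {
		return err
	}
	return ch.InsertIncident(sessionID, payload)
}

type okStore struct{}

func (okStore) InsertIncident(uint64, string) error { return nil }

type downStore struct{}

func (downStore) InsertIncident(uint64, string) error { return errors.New("pg down") }

func main() {
	fmt.Println(saveIncident(okStore{}, okStore{}, 1, "{}"))   // <nil>
	fmt.Println(saveIncident(downStore{}, okStore{}, 1, "{}")) // pg down
}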
Some files were not shown because too many files have changed in this diff.