Compare commits
288 commits
main...inliner-te
Commits (SHA1):

5053e83bf0, 2a583d9638, fdbb223cd3, 2bb3300a95, 7e9d6b4761, bf79a4c893, dcf6fdb1c9, a4f6a93c59,
c4ad390b3f, d378b00bf7, bb6e2cbbdc, 6b30e261a5, f29e729acb, 56b6c6c7e6, 0a5d4413ca, ddb47631b6,
04beacde61, 9fec22319b, dca5e54811, 4e331b70a4, 5cccaaa782, 58d1f7c145, cbb930379d, b9e6bd6e72,
6f3058f9f9, c71db6c441, 3200107d71, 526dfd7e21, e92ba42d82, b1b21937ed, 55d435be87, 4972cfad94,
5c24594d5b, 27e20c4ef1, 3edea4acb4, 5304dbf8c1, 0c64003b09, f38b3a830d, 500fb44856, 2a483b62f0,
c9b29c5c3d, 889fde91a9, eb7f3fb7a0, d58031caf6, 83b6b6a3dd, db5fc57e43, b4fd3def10, d477862edf,
821db5c0d5, 4a9a082896, b109dd559a, de04e23c51, c6b0649613, d2d886b322, a009ff928c, a13f427816,
8a69316b82, 1576208e25, 3ac5c30c5f, 39d3d8db4c, 812983f97c, 1df4a92901, d9fe534223, 95d4df7a1b,
b3cb8df65b, 18f8ee9d15, 2eb4ab4b84, a3fdad3de1, 5ee2b125c8, 360d6ca382, b477f9637d, 626c453a80,
859100213d, ec6076c0b3, 878c10f723, 2c0a75b11f, 718b3f3223, 9f6dc788c4, 9b9248f1f6, f591e886d7,
cb0ea165f9, db404c0648, e4d75467ef, 69b8e2e774, 160370f45e, 53f3623481, 324299170e, f5f47103c3,
70390920cd, 447a2490ef, ad3f72a10b, 70d2bbd9b9, bbdde7be81, 22d71ceb14, 53797500bf, 7217959992,
effdfaef2c, 0c3bac0fe0, 1f55f8241c, 8b75dfa149, 94642d2871, 33f571acc4, b886e9b242, f963ff394d,
a26411f2a6, ee71625499, 7d6f838d25, 089539ef7e, 373d71e4f3, cde427ae4c, 7cfef90cc8, 04db655776,
b91f5df89f, 7fd741348c, 2aaafa5b22, 11f9b865cf, 60a691bbaf, 3f1f6c03f2, dcd19e3c83, ced855568f,
c8483df795, d544da0665, 408c3122d3, c196736c3c, d47542830f, 055ff8f64a, 2bf92f40f7, f0f78341e7,
dbb805189f, e32dbe2ee2, 3272f5b9fd, ea4e2ab198, 990e1fa1c4, 5ca97ceedd, d3b8c35058, 1b851a8b72,
553e3f6045, 3f73bae22f, 9160b42113, 36e1a2fca2, cbbd480cca, 77ae0cac0e, 5771323800, aab8691cf5,
d9ff3f4691, 09c2ce0976, 0141a42911, b55e44d450, f70cce7e23, 8b3be469b6, dc975bc19a, c1d51b98a2,
5a51bfb984, b55b9e5515, af7b46516f, 05e0306823, 77a8371543, e4406ad26b, a8971d842b, c003057cf0,
586472c7dd, ecb192f16e, 6dc585417f, 264444c92a, b2fcd7094b, f3b98dad8a, c27213c65d, f61c5e99b5,
6412f14b08, 0a620c6ba3, 685741f039, 4ee78e1a5c, 77735d9d72, e3065e0530, d9d4221ad3, 0bbde3e75a,
7dec8bb943, c6a5ed6c3b, 99d62fa549, c0bb05bc0f, 70258e5c1d, 6ec146b24b, 9f464e3b41, e95bdab478,
421b3d1dc5, 437a25fb97, cb55a17227, 9d160abda5, 3758cf6565, 9db5e2a8f7, e0dba41065, 8fbaf25799,
65072f607f, cb4bf932c4, 20b938365c, 8e68ebd52b, 293382ea85, ac35bf5179, eb610d1c21, ac0ccb2169,
20a57d7ca1, 856e716507, bb17f672fe, d087736df0, ce546bcfa3, 9f681aca45, 0500f30d14, ec2c42c688,
7f0bc100f5, 522a985ef3, 634d0e8a0f, 28b4fc7598, 0d4c256ca8, 35f63a8fb1, a4e96822ed, 96f984a76a,
5f15dfafe7, b9cca6b388, 712f07988e, 08bddb3165, 3efb879cdf, ccf44fda70, ce525a4ccf, c6299c4592,
a371c79151, f59a8c24f4, 8be6f63711, 8ba35b1324, 28dea3b225, 666643a6ae, 4cf688f15c, 1e57c90449,
c0678bab15, 187a69a61a, 2e96a072e9, 5a410e63b3, 300a857a5c, eba22e0efa, 664f6b9014, 5bbd7cff10,
6f172d4f01, 829e1c8bde, e7d309dadf, 4bac12308a, 2aba1d9a52, 1f4e32e4f2, 49f98967d6, 356fa02094,
a8e47e59ad, c760d29fb4, d77a518cf0, e04c2aa251, e6eb41536d, 4b3ad60565, 90669b0604, f4bf1b8960,
70423c6d8e, ae313c17d4, 0e45fa53ad, fe20f83130, d04e6686ca, 6adb45e15f, a1337faeee, 7e065ab02f,
1e2dde09b4, 3cdfe76134, 39855651d5, dd469d2349, 3d448320bf, 7b0771a581, 988b396223, fa3b585785,
91e0ebeb56, 8e68eb9a20, 13bd3d9121, 048ae0913c, 73fff8b817, 605fa96a34, 2cb33d7894, 15d427418d,
ed3e553726, 7eace68de6, 8009882cef, 7365d8639c, 4c967d4bc1, 3fdf799bd7, 9aca716e6b, cf9ecdc9a4
413 changed files with 22959 additions and 8846 deletions
@@ -47,6 +47,7 @@ runs:
"JWT_SECRET:.global.jwtSecret"
"JWT_SPOT_REFRESH_SECRET:.chalice.env.JWT_SPOT_REFRESH_SECRET"
"JWT_SPOT_SECRET:.global.jwtSpotSecret"
"JWT_SECRET:.global.tokenSecret"
"LICENSE_KEY:.global.enterpriseEditionLicense"
"MINIO_ACCESS_KEY:.global.s3.accessKey"
"MINIO_SECRET_KEY:.global.s3.secretKey"
.github/workflows/patch-build-old.yaml (8 changes, vendored)

@@ -8,11 +8,7 @@ on:
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to update.'
required: true
type: string
branch:
description: 'Branch to build patches from. Make sure the branch is uptodate with tag. Else itll cause missing commits.'
description: 'Tag to build patches from.'
required: true
type: string

@@ -77,7 +73,7 @@ jobs:
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
run: echo "BRANCH_NAME=patch/main/${HEAD_COMMIT_ID}" >> $GITHUB_ENV

- name: Build
id: build-image
.github/workflows/patch-build.yaml (243 changes, vendored)
@@ -2,6 +2,7 @@
|
|||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
|
||||
inputs:
|
||||
services:
|
||||
description: 'Comma separated names of services to build(in small letters).'
|
||||
|
|
@@ -19,20 +20,12 @@ jobs:
|
|||
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 1
|
||||
- name: Rebase with main branch, to make sure the code has latest main changes
|
||||
if: github.ref != 'refs/heads/main'
|
||||
run: |
|
||||
git remote -v
|
||||
git config --global user.email "action@github.com"
|
||||
git config --global user.name "GitHub Action"
|
||||
git config --global rebase.autoStash true
|
||||
git fetch origin main:main
|
||||
git rebase main
|
||||
git log -3
|
||||
git pull --rebase origin main
|
||||
|
||||
- name: Downloading yq
|
||||
run: |
|
||||
|
|
@@ -55,8 +48,6 @@ jobs:
|
|||
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
|
||||
|
||||
- uses: depot/setup-action@v1
|
||||
env:
|
||||
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
|
||||
- name: Get HEAD Commit ID
|
||||
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
- name: Define Branch Name
|
||||
|
|
@@ -74,168 +65,78 @@ jobs:
|
|||
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
|
||||
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
|
||||
MSAAS_REPO_FOLDER: /tmp/msaas
|
||||
SERVICES_INPUT: ${{ github.event.inputs.services }}
|
||||
run: |
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# Configuration
|
||||
readonly WORKING_DIR=$(pwd)
|
||||
readonly BUILD_SCRIPT_NAME="build.sh"
|
||||
readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
|
||||
|
||||
# Initialize git configuration
|
||||
setup_git() {
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
set -exo pipefail
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b $BRANCH_NAME
|
||||
working_dir=$(pwd)
|
||||
function image_version(){
|
||||
local service=$1
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
current_version=$(yq eval '.AppVersion' $chart_path)
|
||||
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
|
||||
echo $new_version
|
||||
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
|
||||
}
|
||||
|
||||
# Get and increment image version
|
||||
image_version() {
|
||||
local service=$1
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
local current_version new_version
|
||||
|
||||
current_version=$(yq eval '.AppVersion' "$chart_path")
|
||||
new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
|
||||
echo "$new_version"
|
||||
function clone_msaas() {
|
||||
[ -d $MSAAS_REPO_FOLDER ] || {
|
||||
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
|
||||
git log -1
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
}
|
||||
}
|
||||
|
||||
# Clone MSAAS repository if not exists
|
||||
clone_msaas() {
|
||||
if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
|
||||
git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
cd openreplay && git fetch origin && git checkout main
|
||||
git log -1
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
fi
|
||||
function build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
echo building managed
|
||||
clone_msaas
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/api
|
||||
else
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/$service
|
||||
fi
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
|
||||
}
|
||||
|
||||
# Build managed services
|
||||
build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
|
||||
echo "Building managed service: $service"
|
||||
clone_msaas
|
||||
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/api"
|
||||
else
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/$service"
|
||||
fi
|
||||
|
||||
local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
|
||||
|
||||
echo "Executing: $build_cmd"
|
||||
if ! eval "$build_cmd" 2>&1; then
|
||||
echo "Build failed for $service"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Build service with given arguments
|
||||
build_service() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local build_args=$3
|
||||
local build_script=${4:-$BUILD_SCRIPT_NAME}
|
||||
|
||||
local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
|
||||
echo "Executing: $command"
|
||||
eval "$command"
|
||||
}
|
||||
|
||||
# Update chart version and commit changes
|
||||
update_chart_version() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
|
||||
# Ensure we're in the original working directory/repository
|
||||
cd "$WORKING_DIR"
|
||||
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
|
||||
git add "$chart_path"
|
||||
git commit -m "Increment $service chart version to $version"
|
||||
git push --set-upstream origin "$BRANCH_NAME"
|
||||
cd -
|
||||
}
|
||||
|
||||
# Main execution
|
||||
main() {
|
||||
setup_git
|
||||
|
||||
# Get backend services list
|
||||
ls backend/cmd >"$BACKEND_SERVICES_FILE"
|
||||
|
||||
# Parse services input (fix for GitHub Actions syntax)
|
||||
echo "Services: ${SERVICES_INPUT:-$1}"
|
||||
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
|
||||
|
||||
# Process each service
|
||||
for service in "${services[@]}"; do
|
||||
echo "Processing service: $service"
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
|
||||
|
||||
# Determine build configuration based on service type
|
||||
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
|
||||
# Backend service
|
||||
cd backend
|
||||
foss_build_args="nil $service"
|
||||
ee_build_args="ee $service"
|
||||
else
|
||||
# Non-backend service
|
||||
case "$service" in
|
||||
chalice | alerts | crons)
|
||||
cd "$WORKING_DIR/api"
|
||||
;;
|
||||
*)
|
||||
cd "$service"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Special build scripts for alerts/crons
|
||||
if [[ $service == 'alerts' || $service == 'crons' ]]; then
|
||||
build_script="build_${service}.sh"
|
||||
fi
|
||||
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
|
||||
# Get version and build
|
||||
local version
|
||||
version=$(image_version "$service")
|
||||
|
||||
# Build FOSS and EE versions
|
||||
build_service "$service" "$version" "$foss_build_args"
|
||||
build_service "$service" "${version}-ee" "$ee_build_args"
|
||||
|
||||
# Build managed version for specific services
|
||||
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
|
||||
echo "Nothing to build in managed for service $service"
|
||||
else
|
||||
build_managed "$service" "$version"
|
||||
fi
|
||||
|
||||
# Update chart and commit
|
||||
update_chart_version "$service" "$version"
|
||||
done
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
# Cleanup
|
||||
rm -f "$BACKEND_SERVICES_FILE"
|
||||
}
|
||||
|
||||
echo "Working directory: $WORKING_DIR"
|
||||
# Run main function with all arguments
|
||||
main "$SERVICES_INPUT"
|
||||
|
||||
# Checking for backend images
|
||||
ls backend/cmd >> /tmp/backend.txt
|
||||
echo Services: "${{ github.event.inputs.services }}"
|
||||
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
|
||||
BUILD_SCRIPT_NAME="build.sh"
|
||||
# Build FOSS
|
||||
for SERVICE in "${SERVICES[@]}"; do
|
||||
# Check if service is backend
|
||||
if grep -q $SERVICE /tmp/backend.txt; then
|
||||
cd backend
|
||||
foss_build_args="nil $SERVICE"
|
||||
ee_build_args="ee $SERVICE"
|
||||
else
|
||||
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
|
||||
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
version=$(image_version $SERVICE)
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
else
|
||||
build_managed $SERVICE $version
|
||||
fi
|
||||
cd $working_dir
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
|
||||
yq eval ".AppVersion = \"$version\"" -i $chart_path
|
||||
git add $chart_path
|
||||
git commit -m "Increment $SERVICE chart version"
|
||||
git push --set-upstream origin $BRANCH_NAME
|
||||
done
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: repo-sync/pull-request@v2
|
||||
|
|
.github/workflows/tracker-tests.yaml (18 changes, vendored)

@@ -22,22 +22,14 @@ jobs:
- name: Cache tracker modules
uses: actions/cache@v3
with:
path: tracker/tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
restore-keys: |
test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}-
- name: Cache tracker-assist modules
uses: actions/cache@v3
with:
path: tracker/tracker-assist/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
path: tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }}
restore-keys: |
test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}-
- name: Setup Testing packages
run: |
cd tracker/tracker
cd tracker
bun install
- name: Jest tests
run: |

@@ -47,10 +39,6 @@ jobs:
run: |
cd tracker/tracker
bun run build
- name: (TA) Setup Testing packages
run: |
cd tracker/tracker-assist
bun install
- name: (TA) Jest tests
run: |
cd tracker/tracker-assist
.github/workflows/update-tag.yaml (3 changes, vendored)

@@ -33,10 +33,11 @@ jobs:
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git

- name: Push main branch to tag
run: |
git fetch --tags
git checkout main
echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
.gitignore (1 change, vendored)

@@ -7,3 +7,4 @@ node_modules
**/*.envrc
.idea
*.mob*
install-state.gz

@@ -1,7 +1,7 @@
repos:
- repo: https://github.com/gitguardian/ggshield
rev: v1.14.5
rev: v1.38.0
hooks:
- id: ggshield
language_version: python3
stages: [commit]
stages: [pre-commit]
api/Pipfile (11 changes)

@@ -6,16 +6,15 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.21"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}

@@ -16,7 +16,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client, ch_client
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")

@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)

app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/pa")
app.include_router(product_analytics.app, prefix="/pa")
app.include_router(product_analytics.app_apikey, prefix="/pa")
|||
|
|
@ -0,0 +1,11 @@
|
|||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental autocomplete")
|
||||
from . import autocomplete_ch as autocomplete
|
||||
else:
|
||||
from . import autocomplete
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core.errors import errors_legacy
|
||||
|
|
@ -7,6 +9,8 @@ from chalicelib.utils import ch_client, exp_ch_helper
|
|||
from chalicelib.utils import helper, metrics_helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _multiple_values(values, value_key="value"):
|
||||
query_values = {}
|
||||
|
|
@ -378,9 +382,9 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
|
|||
ORDER BY timestamp) AS sub_table
|
||||
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
|
||||
|
||||
# print("------------")
|
||||
# print(ch.format(main_ch_query, params))
|
||||
# print("------------")
|
||||
logger.debug("------------")
|
||||
logger.debug(ch.format(main_ch_query, params))
|
||||
logger.debug("------------")
|
||||
query = ch.format(query=main_ch_query, parameters=params)
|
||||
|
||||
rows = ch.execute(query=query)
|
||||
|
|
|
|||
|
|
@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT username, token, url
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id = %(user_id)s;""",
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
{"user_id": self._user_id})
|
||||
)
|
||||
data = helper.dict_to_camel_case(cur.fetchone())
|
||||
|
|
@ -95,9 +95,10 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
def add(self, username, token, url, obfuscate=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" \
|
||||
INSERT INTO public.jira_cloud(username, token, user_id, url)
|
||||
VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
|
||||
cur.mogrify("""\
|
||||
INSERT INTO public.jira_cloud(username, token, user_id,url)
|
||||
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
|
||||
RETURNING username, token, url;""",
|
||||
{"user_id": self._user_id, "username": username,
|
||||
"token": token, "url": url})
|
||||
)
|
||||
|
|
@ -111,10 +112,9 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
def delete(self):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" \
|
||||
DELETE
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id = %(user_id)s;""",
|
||||
cur.mogrify("""\
|
||||
DELETE FROM public.jira_cloud
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
{"user_id": self._user_id})
|
||||
)
|
||||
return {"state": "success"}
|
||||
|
|
@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
changes={
|
||||
"username": data.username,
|
||||
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
|
||||
else self.integration["token"],
|
||||
else self.integration.token,
|
||||
"url": str(data.url)
|
||||
},
|
||||
obfuscate=True
|
||||
|
|
|
|||
|
|
@ -241,3 +241,25 @@ def get_colname_by_key(project_id, key):
|
|||
return None
|
||||
|
||||
return index_to_colname(meta_keys[key])
|
||||
|
||||
|
||||
def get_for_filters(project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT {",".join(column_names())}
|
||||
FROM public.projects
|
||||
WHERE project_id = %(project_id)s
|
||||
AND deleted_at ISNULL
|
||||
LIMIT 1;""", {"project_id": project_id})
|
||||
cur.execute(query=query)
|
||||
metas = cur.fetchone()
|
||||
results = []
|
||||
if metas is not None:
|
||||
for i, k in enumerate(metas.keys()):
|
||||
if metas[k] is not None:
|
||||
results.append({"id": f"meta_{i}",
|
||||
"name": k,
|
||||
"displayName": metas[k],
|
||||
"possibleTypes": ["String"],
|
||||
"autoCaptured": False,
|
||||
"icon": None})
|
||||
return {"total": len(results), "list": results}
|
||||
|
|
|
|||
|
|
@ -241,14 +241,13 @@ def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSche
|
|||
params["card_info"] = json.dumps(params["card_info"])
|
||||
|
||||
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
|
||||
view_type, metric_type, metric_of, metric_value,
|
||||
metric_format, default_config, thumbnail, data,
|
||||
card_info)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
|
||||
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
|
||||
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
|
||||
%(card_info)s)
|
||||
RETURNING metric_id"""
|
||||
view_type, metric_type, metric_of, metric_value,
|
||||
metric_format, default_config, thumbnail, data,
|
||||
card_info)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
|
||||
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
|
||||
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
|
||||
%(card_info)s) RETURNING metric_id"""
|
||||
if len(data.series) > 0:
|
||||
query = f"""WITH m AS ({query})
|
||||
INSERT INTO metric_series(metric_id, index, name, filter)
|
||||
|
|
@ -525,13 +524,13 @@ def get_all(project_id, user_id):
|
|||
def delete_card(project_id, metric_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
RETURNING data;""",
|
||||
cur.mogrify(""" \
|
||||
UPDATE public.metrics
|
||||
SET deleted_at = timezone('utc'::text, now()),
|
||||
edited_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public) RETURNING data;""",
|
||||
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
|
||||
)
|
||||
|
||||
|
|
@ -615,13 +614,14 @@ def get_series_for_alert(project_id, user_id):
|
|||
FALSE AS predefined,
|
||||
metric_id,
|
||||
series_id
|
||||
FROM metric_series
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
WHERE metrics.deleted_at ISNULL
|
||||
AND metrics.project_id = %(project_id)s
|
||||
AND metrics.metric_type = 'timeseries'
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
ORDER BY name;""",
|
||||
FROM metric_series
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
WHERE metrics.deleted_at ISNULL
|
||||
AND metrics.project_id = %(project_id)s
|
||||
AND metrics.metric_type = 'timeseries'
|
||||
AND (user_id = %(user_id)s
|
||||
OR is_public)
|
||||
ORDER BY name;""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
|
|
@ -632,11 +632,11 @@ def get_series_for_alert(project_id, user_id):
|
|||
def change_state(project_id, metric_id, user_id, status):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET active = %(status)s
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
cur.mogrify(""" \
|
||||
UPDATE public.metrics
|
||||
SET active = %(status)s
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"metric_id": metric_id, "status": status, "user_id": user_id})
|
||||
)
|
||||
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||
|
|
@ -674,7 +674,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
|
|||
"issue": issue}
|
||||
|
||||
|
||||
def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema):
|
||||
def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id,
|
||||
data: schemas.CardSessionsSchema, for_dashboard: bool = False):
|
||||
raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True)
|
||||
|
||||
if raw_metric is None:
|
||||
|
|
@ -693,7 +694,8 @@ def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, da
|
|||
return heatmaps.search_short_session(project_id=project.project_id,
|
||||
data=schemas.HeatMapSessionsSearch(**metric.model_dump()),
|
||||
user_id=user_id)
|
||||
|
||||
elif metric.metric_type == schemas.MetricType.PATH_ANALYSIS and for_dashboard:
|
||||
metric.hide_excess = True
|
||||
return get_chart(project=project, data=metric, user_id=user_id)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from chalicelib.utils import helper
|
|||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
|
||||
def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
|
||||
def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
|
||||
ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
|
||||
schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
|
||||
schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
|
||||
|
|
@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
|
|||
|
||||
|
||||
def __parse_events(f_events: List[dict]):
|
||||
return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
|
||||
return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events]
|
||||
|
||||
|
||||
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
|
||||
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]):
|
||||
if f_events is None:
|
||||
return
|
||||
events = []
|
||||
|
|
|
|||
|
|
@ -160,7 +160,7 @@ s.start_ts,
|
|||
s.duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
|
|
@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
|
|||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
|
||||
|
|
|
|||
|
|
@ -24,8 +24,9 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
|
|||
"main_events.`$event_name` = 'CLICK'",
|
||||
"isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
|
||||
]
|
||||
|
||||
if data.operator == schemas.SearchEventOperator.IS:
|
||||
if data.operator == schemas.SearchEventOperator.PATTERN:
|
||||
constraints.append("match(main_events.`$properties`.url_path'.:String,%(url)s)")
|
||||
elif data.operator == schemas.SearchEventOperator.IS:
|
||||
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
|
||||
else:
|
||||
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")
|
||||
|
|
@ -179,7 +180,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
|
|||
s.duration AS duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
|
|
@ -262,13 +263,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
|
|||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
|
||||
|
|
|
|||
|
|
@ -241,7 +241,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
:return:
|
||||
"""
|
||||
|
||||
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
|
||||
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
|
||||
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
|
||||
|
||||
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
|
||||
metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
|
||||
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
|
||||
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
|
||||
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
|
||||
platform = project.platform
|
||||
constraints = ["e.project_id = %(project_id)s",
|
||||
|
|
|
|||
|
|
@ -1,14 +0,0 @@
|
|||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
|
||||
def search_events(project_id: int, data: dict):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT *
|
||||
FROM taha.events
|
||||
WHERE project_id=%(project_id)s
|
||||
ORDER BY created_at;""",
|
||||
params={"project_id": project_id})
|
||||
x = ch_client.execute(r)
|
||||
|
||||
return x
|
||||
|
|
@ -6,6 +6,7 @@ from decouple import config
|
|||
import schemas
|
||||
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
|
||||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.core.modules import TENANT_CONDITION
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
|
@ -16,13 +17,13 @@ logger = logging.getLogger(__name__)
|
|||
def get_note(tenant_id, project_id, user_id, note_id, share=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
|
||||
{",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
|
||||
{f",(SELECT name FROM users WHERE {TENANT_CONDITION} AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.note_id = %(note_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s);""",
|
||||
OR sessions_notes.is_public AND {TENANT_CONDITION});""",
|
||||
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
|
||||
"note_id": note_id, "share": share})
|
||||
|
||||
|
|
@ -43,7 +44,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
|
|||
AND sessions_notes.deleted_at IS NULL
|
||||
AND sessions_notes.session_id = %(session_id)s
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)
|
||||
OR sessions_notes.is_public AND {TENANT_CONDITION})
|
||||
ORDER BY created_at DESC;""",
|
||||
{"project_id": project_id, "user_id": user_id,
|
||||
"tenant_id": tenant_id, "session_id": session_id})
|
||||
|
|
@ -62,7 +63,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
|
|||
conditions = [
|
||||
"sessions_notes.project_id = %(project_id)s",
|
||||
"sessions_notes.deleted_at IS NULL",
|
||||
"users.tenant_id = %(tenant_id)s"
|
||||
TENANT_CONDITION
|
||||
]
|
||||
params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
|
||||
|
||||
|
|
@ -127,7 +128,7 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
|
|||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
|
||||
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
|
||||
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
|
||||
**data.model_dump(),
|
||||
"tenant_id": tenant_id})
|
||||
|
|
@ -161,7 +162,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
|
|||
AND user_id = %(user_id)s
|
||||
AND note_id = %(note_id)s
|
||||
AND deleted_at ISNULL
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
|
||||
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(),
|
||||
"tenant_id": tenant_id})
|
||||
)
|
||||
api/chalicelib/core/product_analytics/__init__.py (0 changes, new file)
api/chalicelib/core/product_analytics/autocomplete.py (59 changes, new file)
|
|
@ -0,0 +1,59 @@
|
|||
from typing import Optional
|
||||
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
|
||||
def search_events(project_id: int, q: Optional[str] = None):
|
||||
with ClickHouseClient() as ch_client:
|
||||
full_args = {"project_id": project_id, "limit": 20}
|
||||
|
||||
constraints = ["project_id = %(project_id)s",
|
||||
"_timestamp >= now()-INTERVAL 1 MONTH"]
|
||||
if q:
|
||||
constraints += ["value ILIKE %(q)s"]
|
||||
full_args["q"] = helper.string_to_sql_like(q)
|
||||
query = ch_client.format(
|
||||
f"""SELECT value,data_count
|
||||
FROM product_analytics.autocomplete_events_grouped
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY data_count DESC
|
||||
LIMIT %(limit)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
|
||||
return {"values": helper.list_to_camel_case(rows), "_src": 2}
|
||||
|
||||
|
||||
def search_properties(project_id: int, property_name: Optional[str] = None, event_name: Optional[str] = None,
|
||||
q: Optional[str] = None):
|
||||
with ClickHouseClient() as ch_client:
|
||||
select = "value"
|
||||
full_args = {"project_id": project_id, "limit": 20,
|
||||
"event_name": event_name, "property_name": property_name, "q": q,
|
||||
"property_name_l": helper.string_to_sql_like(property_name),
|
||||
"q_l": helper.string_to_sql_like(q)}
|
||||
|
||||
constraints = ["project_id = %(project_id)s",
|
||||
"_timestamp >= now()-INTERVAL 1 MONTH"]
|
||||
if event_name:
|
||||
constraints += ["event_name = %(event_name)s"]
|
||||
|
||||
if property_name and q:
|
||||
constraints += ["property_name = %(property_name)s"]
|
||||
elif property_name:
|
||||
select = "DISTINCT ON(property_name) property_name AS value"
|
||||
constraints += ["property_name ILIKE %(property_name_l)s"]
|
||||
|
||||
if q:
|
||||
constraints += ["value ILIKE %(q_l)s"]
|
||||
query = ch_client.format(
|
||||
f"""SELECT {select},data_count
|
||||
FROM product_analytics.autocomplete_event_properties_grouped
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY data_count DESC
|
||||
LIMIT %(limit)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
|
||||
return {"values": helper.list_to_camel_case(rows), "_src": 2}
|
||||
api/chalicelib/core/product_analytics/events.py (182 changes, new file)
|
|
@ -0,0 +1,182 @@
|
|||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
PREDEFINED_EVENTS = {
|
||||
"CLICK": "String",
|
||||
"INPUT": "String",
|
||||
"LOCATION": "String",
|
||||
"ERROR": "String",
|
||||
"PERFORMANCE": "String",
|
||||
"REQUEST": "String"
|
||||
}
|
||||
|
||||
|
||||
def get_events(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT DISTINCT
|
||||
ON(event_name,auto_captured)
|
||||
COUNT (1) OVER () AS total,
|
||||
event_name AS name, display_name, description,
|
||||
auto_captured
|
||||
FROM product_analytics.all_events
|
||||
WHERE project_id=%(project_id)s
|
||||
ORDER BY auto_captured, display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
|
||||
rows = ch_client.execute(r)
|
||||
if len(rows) == 0:
|
||||
return {"total": len(PREDEFINED_EVENTS), "list": [{
|
||||
"name": e,
|
||||
"displayName": "",
|
||||
"description": "",
|
||||
"autoCaptured": True,
|
||||
"id": "event_0",
|
||||
"dataType": "string",
|
||||
"possibleTypes": [
|
||||
"string"
|
||||
],
|
||||
"_foundInPredefinedList": False
|
||||
} for e in PREDEFINED_EVENTS]}
|
||||
total = rows[0]["total"]
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for i, row in enumerate(rows):
|
||||
row["id"] = f"event_{i}"
|
||||
row["dataType"] = "string"
|
||||
row["possibleTypes"] = ["string"]
|
||||
row["_foundInPredefinedList"] = True
|
||||
row.pop("total")
|
||||
keys = [r["name"] for r in rows]
|
||||
for e in PREDEFINED_EVENTS:
|
||||
if e not in keys:
|
||||
total += 1
|
||||
rows.append({
|
||||
"name": e,
|
||||
"displayName": "",
|
||||
"description": "",
|
||||
"autoCaptured": True,
|
||||
"id": "event_0",
|
||||
"dataType": "string",
|
||||
"possibleTypes": [
|
||||
"string"
|
||||
],
|
||||
"_foundInPredefinedList": False
|
||||
})
|
||||
return {"total": total, "list": rows}
|
||||
|
||||
|
||||
def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
|
||||
"projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
|
||||
|
||||
constraints = ["project_id = %(projectId)s",
|
||||
"created_at >= toDateTime(%(startDate)s/1000)",
|
||||
"created_at <= toDateTime(%(endDate)s/1000)"]
|
||||
ev_constraints = []
|
||||
for i, f in enumerate(data.filters):
|
||||
if not f.is_event:
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
f_k = f"f_value{i}"
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
is_any = sh.isAny_opreator(f.operator)
|
||||
is_undefined = sh.isUndefined_operator(f.operator)
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
if f.is_predefined:
|
||||
column = f.name
|
||||
else:
|
||||
column = f"properties.{f.name}"
|
||||
|
||||
if is_any:
|
||||
condition = f"notEmpty{column})"
|
||||
elif is_undefined:
|
||||
condition = f"empty({column})"
|
||||
else:
|
||||
condition = sh.multi_conditions(
|
||||
get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
|
||||
values=f.value, value_key=f_k)
|
||||
constraints.append(condition)
|
||||
|
||||
else:
|
||||
e_k = f"e_value{i}"
|
||||
full_args = {**full_args, e_k: f.name}
|
||||
condition = f"`$event_name` = %({e_k})s"
|
||||
sub_conditions = []
|
||||
for j, ef in enumerate(f.properties.filters):
|
||||
p_k = f"e_{i}_p_{j}"
|
||||
full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k, data_type=ef.data_type)}
|
||||
cast = get_col_cast(data_type=ef.data_type, value=ef.value)
|
||||
if ef.is_predefined:
|
||||
sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(`{ef.name}`,'{cast}')",
|
||||
val_name=p_k, operator=ef.operator)
|
||||
else:
|
||||
sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(properties.`{ef.name}`,{cast})",
|
||||
val_name=p_k, operator=ef.operator)
|
||||
sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
|
||||
if len(sub_conditions) > 0:
|
||||
condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"
|
||||
|
||||
ev_constraints.append(condition)
|
||||
|
||||
constraints.append("(" + " OR ".join(ev_constraints) + ")")
|
||||
query = ch_client.format(
|
||||
f"""SELECT COUNT(1) OVER () AS total,
|
||||
event_id,
|
||||
`$event_name`,
|
||||
created_at,
|
||||
`distinct_id`,
|
||||
`$browser`,
|
||||
`$import`,
|
||||
`$os`,
|
||||
`$country`,
|
||||
`$state`,
|
||||
`$city`,
|
||||
`$screen_height`,
|
||||
`$screen_width`,
|
||||
`$source`,
|
||||
`$user_id`,
|
||||
`$device`
|
||||
FROM product_analytics.events
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY created_at
|
||||
LIMIT %(limit)s OFFSET %(offset)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
if len(rows) == 0:
|
||||
return {"total": 0, "rows": [], "_src": 2}
|
||||
total = rows[0]["total"]
|
||||
for r in rows:
|
||||
r.pop("total")
|
||||
return {"total": total, "rows": rows, "_src": 2}
|
||||
|
||||
|
||||
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, all_events.event_name AS name,
|
||||
*
|
||||
FROM product_analytics.all_events
|
||||
WHERE project_id = %(project_id)s
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
|
||||
rows = ch_client.execute(r)
|
||||
if len(rows) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = rows[0]["total"]
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for i, row in enumerate(rows):
|
||||
row["id"] = f"event_{i}"
|
||||
row["dataType"] = "string"
|
||||
row["possibleTypes"] = ["string"]
|
||||
row["_foundInPredefinedList"] = True
|
||||
row.pop("total")
|
||||
return {"total": total, "list": rows}
|
||||
api/chalicelib/core/product_analytics/properties.py (167 changes, new file)
|
|
@ -0,0 +1,167 @@
|
|||
import schemas
|
||||
from chalicelib.utils import helper, exp_ch_helper
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
PREDEFINED_PROPERTIES = {
|
||||
"label": "String",
|
||||
"hesitation_time": "UInt32",
|
||||
"name": "String",
|
||||
"payload": "String",
|
||||
"level": "Enum8",
|
||||
"source": "Enum8",
|
||||
"message": "String",
|
||||
"error_id": "String",
|
||||
"duration": "UInt16",
|
||||
"context": "Enum8",
|
||||
"url_host": "String",
|
||||
"url_path": "String",
|
||||
"url_hostpath": "String",
|
||||
"request_start": "UInt16",
|
||||
"response_start": "UInt16",
|
||||
"response_end": "UInt16",
|
||||
"dom_content_loaded_event_start": "UInt16",
|
||||
"dom_content_loaded_event_end": "UInt16",
|
||||
"load_event_start": "UInt16",
|
||||
"load_event_end": "UInt16",
|
||||
"first_paint": "UInt16",
|
||||
"first_contentful_paint_time": "UInt16",
|
||||
"speed_index": "UInt16",
|
||||
"visually_complete": "UInt16",
|
||||
"time_to_interactive": "UInt16",
|
||||
"ttfb": "UInt16",
|
||||
"ttlb": "UInt16",
|
||||
"response_time": "UInt16",
|
||||
"dom_building_time": "UInt16",
|
||||
"dom_content_loaded_event_time": "UInt16",
|
||||
"load_event_time": "UInt16",
|
||||
"min_fps": "UInt8",
|
||||
"avg_fps": "UInt8",
|
||||
"max_fps": "UInt8",
|
||||
"min_cpu": "UInt8",
|
||||
"avg_cpu": "UInt8",
|
||||
"max_cpu": "UInt8",
|
||||
"min_total_js_heap_size": "UInt64",
|
||||
"avg_total_js_heap_size": "UInt64",
|
||||
"max_total_js_heap_size": "UInt64",
|
||||
"min_used_js_heap_size": "UInt64",
|
||||
"avg_used_js_heap_size": "UInt64",
|
||||
"max_used_js_heap_size": "UInt64",
|
||||
"method": "Enum8",
|
||||
"status": "UInt16",
|
||||
"success": "UInt8",
|
||||
"request_body": "String",
|
||||
"response_body": "String",
|
||||
"transfer_size": "UInt32",
|
||||
"selector": "String",
|
||||
"normalized_x": "Float32",
|
||||
"normalized_y": "Float32",
|
||||
"message_id": "UInt64"
|
||||
}
|
||||
|
||||
|
||||
def get_all_properties(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, property_name AS name,
|
||||
display_name,
|
||||
array_agg(DISTINCT event_properties.value_type) AS possible_types
|
||||
FROM product_analytics.all_properties
|
||||
LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
|
||||
WHERE all_properties.project_id = %(project_id)s
|
||||
GROUP BY property_name, display_name
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id,
|
||||
"limit": page.limit,
|
||||
"offset": (page.page - 1) * page.limit})
|
||||
properties = ch_client.execute(r)
|
||||
if len(properties) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = properties[0]["total"]
|
||||
properties = helper.list_to_camel_case(properties)
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p["_foundInPredefinedList"] = False
|
||||
if p["name"] in PREDEFINED_PROPERTIES:
|
||||
p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
|
||||
p["_foundInPredefinedList"] = True
|
||||
p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
|
||||
p.pop("total")
|
||||
keys = [p["name"] for p in properties]
|
||||
for p in PREDEFINED_PROPERTIES:
|
||||
if p not in keys:
|
||||
total += 1
|
||||
properties.append({
|
||||
"name": p,
|
||||
"displayName": "",
|
||||
"possibleTypes": [
|
||||
],
|
||||
"id": f"prop_{len(properties) + 1}",
|
||||
"_foundInPredefinedList": False,
|
||||
"dataType": PREDEFINED_PROPERTIES[p]
|
||||
})
|
||||
return {"total": total, "list": properties}
|
||||
|
||||
|
||||
def get_event_properties(project_id: int, event_name):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT all_properties.property_name AS name,
|
||||
all_properties.display_name,
|
||||
array_agg(DISTINCT event_properties.value_type) AS possible_types
|
||||
FROM product_analytics.event_properties
|
||||
INNER JOIN product_analytics.all_properties USING (property_name)
|
||||
WHERE event_properties.project_id = %(project_id)s
|
||||
AND all_properties.project_id = %(project_id)s
|
||||
AND event_properties.event_name = %(event_name)s
|
||||
GROUP BY ALL
|
||||
ORDER BY 1;""",
|
||||
parameters={"project_id": project_id, "event_name": event_name})
|
||||
properties = ch_client.execute(r)
|
||||
properties = helper.list_to_camel_case(properties)
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p["_foundInPredefinedList"] = False
|
||||
if p["name"] in PREDEFINED_PROPERTIES:
|
||||
p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
|
||||
p["_foundInPredefinedList"] = True
|
||||
p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
|
||||
|
||||
return properties
|
||||
|
||||
|
||||
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, all_properties.property_name AS name,
|
||||
all_properties.*,
|
||||
possible_types.values AS possible_types,
|
||||
possible_values.values AS sample_values
|
||||
FROM product_analytics.all_properties
|
||||
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS
|
||||
values
|
||||
FROM product_analytics.event_properties
|
||||
WHERE project_id=%(project_id)s
|
||||
GROUP BY 1, 2) AS possible_types
|
||||
USING (project_id, property_name)
|
||||
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS
|
||||
values
|
||||
FROM product_analytics.property_values_samples
|
||||
WHERE project_id=%(project_id)s
|
||||
GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
|
||||
WHERE project_id = %(project_id)s
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id,
|
||||
"limit": page.limit,
|
||||
"offset": (page.page - 1) * page.limit})
|
||||
properties = ch_client.execute(r)
|
||||
if len(properties) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = properties[0]["total"]
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p.pop("total")
|
||||
return {"total": total, "list": helper.list_to_camel_case(properties)}
|
||||
|
|
@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
|
|||
from . import sessions_pg
|
||||
from . import sessions_pg as sessions_legacy
|
||||
from . import sessions_ch
|
||||
from . import sessions_search_pg
|
||||
from . import sessions_search_pg as sessions_search_legacy
|
||||
|
||||
if config("EXP_METRICS", cast=bool, default=False):
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
logger.info(">>> Using experimental sessions search")
|
||||
from . import sessions_ch as sessions
|
||||
from . import sessions_search_ch as sessions_search
|
||||
else:
|
||||
from . import sessions_pg as sessions
|
||||
from . import sessions_search_pg as sessions_search
|
||||
|
||||
# if config("EXP_METRICS", cast=bool, default=False):
|
||||
# from . import sessions_ch as sessions
|
||||
# else:
|
||||
# from . import sessions_pg as sessions
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ from chalicelib.core import events, metadata
|
|||
from . import performance_event, sessions_legacy
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -48,8 +49,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
query = f"""SELECT gs.generate_series AS timestamp,
|
||||
COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
|
||||
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
|
||||
LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
|
||||
s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
|
||||
LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id,
|
||||
isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id),
|
||||
s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
|
||||
s.datetime AS datetime
|
||||
{query_part}) AS processed_sessions ON(TRUE)
|
||||
|
|
@ -148,7 +149,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.LOCATION:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
|
|
@ -173,7 +174,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.REQUEST_DETAILS:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
|
|
@ -253,7 +254,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
FROM (SELECT s.user_id AS user_id {extra_col}
|
||||
{query_part}
|
||||
WHERE isNotNull(user_id)
|
||||
AND user_id != '') AS filtred_sessions
|
||||
AND notEmpty(user_id)) AS filtred_sessions
|
||||
{extra_where}
|
||||
GROUP BY {main_col}
|
||||
ORDER BY total DESC
|
||||
|
|
@ -277,7 +278,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
return sessions
|
||||
|
||||
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
|
||||
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
|
||||
schemas.EventType.GRAPHQL] \
|
||||
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
|
||||
|
|
@ -330,7 +331,11 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
|
|||
extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
|
||||
condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
else:
|
||||
condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
# condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
condition = get_sub_condition(
|
||||
col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
|
||||
val_name=value_key, operator=op
|
||||
)
|
||||
|
||||
conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
|
||||
|
||||
|
|
@ -660,7 +665,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
event.value = helper.values_for_operator(value=event.value, op=event.operator)
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(event.value, value_key=e_k),
|
||||
**sh.multi_values(event.source, value_key=s_k)}
|
||||
**sh.multi_values(event.source, value_key=s_k),
|
||||
e_k: event.value[0] if len(event.value) > 0 else event.value}
|
||||
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
|
|
@ -671,24 +677,44 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions.append({"type": event_where[-1]})
|
||||
if not is_any:
|
||||
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
||||
event_where.append(json_condition(
|
||||
"main",
|
||||
"$properties",
|
||||
"selector", op, event.value, e_k)
|
||||
# event_where.append(json_condition(
|
||||
# "main",
|
||||
# "$properties",
|
||||
# "selector", op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"main.`$properties`.selector",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
if is_not:
|
||||
event_where.append(json_condition(
|
||||
"sub", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
# event_where.append(json_condition(
|
||||
# "sub", "$properties", _column, op, event.value, e_k
|
||||
# ))
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
|
||||
}
|
||||
)
|
||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
# event_where.append(
|
||||
# json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"main.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
|
|
@ -870,12 +896,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions[-1]["condition"] = []
|
||||
if not is_any and event.value not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
sh.multi_conditions(
|
||||
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
|
||||
event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
|
||||
value_key=s_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
|
||||
|
|
@ -1222,8 +1251,36 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
else:
|
||||
logging.warning(f"undefined GRAPHQL filter: {f.type}")
|
||||
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
|
||||
elif event_type == schemas.EventType.EVENT:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = events.EventType.CLICK.column
|
||||
event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
|
||||
events_conditions.append({"type": event_where[-1], "condition": ""})
|
||||
|
||||
else:
|
||||
continue
|
||||
if event.properties is not None and len(event.properties.filters) > 0:
|
||||
sub_conditions = []
|
||||
for l, property in enumerate(event.properties.filters):
|
||||
a_k = f"{e_k}_att_{l}"
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(property.value, value_key=a_k, data_type=property.data_type)}
|
||||
cast = get_col_cast(data_type=property.data_type, value=property.value)
|
||||
if property.is_predefined:
|
||||
condition = get_sub_condition(col_name=f"accurateCastOrNull(main.`{property.name}`,'{cast}')",
|
||||
val_name=a_k, operator=property.operator)
|
||||
else:
|
||||
condition = get_sub_condition(
|
||||
col_name=f"accurateCastOrNull(main.properties.`{property.name}`,'{cast}')",
|
||||
val_name=a_k, operator=property.operator)
|
||||
event_where.append(
|
||||
sh.multi_conditions(condition, property.value, value_key=a_k)
|
||||
)
|
||||
sub_conditions.append(event_where[-1])
|
||||
if len(sub_conditions) > 0:
|
||||
sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
|
||||
events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
|
||||
events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
|
||||
if event_index == 0 or or_events:
|
||||
event_where += ss_constraints
|
||||
if is_not:
|
||||
|
|
@ -1449,7 +1506,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
|
||||
_extra_or_condition.append(
|
||||
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
|
||||
c_f.value, value_key=e_k))
|
||||
c_f.value, value_key=e_k))
|
||||
else:
|
||||
logging.warning(f"unsupported extra_event type:${c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
|
|
@ -1521,18 +1578,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
|
|||
def get_session_user(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""\
|
||||
SELECT
|
||||
user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM
|
||||
"public".sessions
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
""" \
|
||||
SELECT user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM "public".sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
GROUP BY user_id;
|
||||
""",
|
||||
{"project_id": project_id, "userId": user_id}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import ast
|
||||
import logging
|
||||
from typing import List, Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata, projects
|
||||
|
|
@ -219,7 +218,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
|
|||
}
|
||||
|
||||
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
|
||||
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
|
||||
schemas.EventType.GRAPHQL] \
|
||||
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
|
||||
|
|
@ -1,269 +0,0 @@
|
|||
import logging
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
|
||||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_note(tenant_id, project_id, user_id, note_id, share=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
|
||||
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.note_id = %(note_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""",
|
||||
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
|
||||
"note_id": note_id, "share": share})
|
||||
|
||||
cur.execute(query=query)
|
||||
row = cur.fetchone()
|
||||
row = helper.dict_to_camel_case(row)
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
|
||||
return row
|
||||
|
||||
|
||||
def get_session_notes(tenant_id, project_id, session_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND sessions_notes.session_id = %(session_id)s
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public)
|
||||
ORDER BY created_at DESC;""",
|
||||
{"project_id": project_id, "user_id": user_id,
|
||||
"tenant_id": tenant_id, "session_id": session_id})
|
||||
|
||||
cur.execute(query=query)
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return rows
|
||||
|
||||
|
||||
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
# base conditions
|
||||
conditions = [
|
||||
"sessions_notes.project_id = %(project_id)s",
|
||||
"sessions_notes.deleted_at IS NULL"
|
||||
]
|
||||
params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
|
||||
|
||||
# tag conditions
|
||||
if data.tags:
|
||||
tag_key = "tag_value"
|
||||
conditions.append(
|
||||
sh.multi_conditions(f"%({tag_key})s = sessions_notes.tag", data.tags, value_key=tag_key)
|
||||
)
|
||||
params.update(sh.multi_values(data.tags, value_key=tag_key))
|
||||
|
||||
# filter by ownership or shared status
|
||||
if data.shared_only:
|
||||
conditions.append("sessions_notes.is_public IS TRUE")
|
||||
elif data.mine_only:
|
||||
conditions.append("sessions_notes.user_id = %(user_id)s")
|
||||
else:
|
||||
conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")
|
||||
|
||||
# search condition
|
||||
if data.search:
|
||||
conditions.append("sessions_notes.message ILIKE %(search)s")
|
||||
params["search"] = f"%{data.search}%"
|
||||
|
||||
query = f"""
|
||||
SELECT
|
||||
COUNT(1) OVER () AS full_count,
|
||||
sessions_notes.*,
|
||||
users.name AS user_name
|
||||
FROM
|
||||
sessions_notes
|
||||
INNER JOIN
|
||||
users USING (user_id)
|
||||
WHERE
|
||||
{" AND ".join(conditions)}
|
||||
ORDER BY
|
||||
created_at {data.order}
|
||||
LIMIT
|
||||
%(limit)s OFFSET %(offset)s;
|
||||
"""
|
||||
params.update({
|
||||
"limit": data.limit,
|
||||
"offset": data.limit * (data.page - 1)
|
||||
})
|
||||
|
||||
query = cur.mogrify(query, params)
|
||||
logger.debug(query)
|
||||
cur.execute(query)
|
||||
rows = cur.fetchall()
|
||||
|
||||
result = {"count": 0, "notes": helper.list_to_camel_case(rows)}
|
||||
if rows:
|
||||
result["count"] = rows[0]["fullCount"]
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
row.pop("fullCount")
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
|
||||
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
|
||||
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
|
||||
**data.model_dump()})
|
||||
cur.execute(query)
|
||||
result = helper.dict_to_camel_case(cur.fetchone())
|
||||
if result:
|
||||
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
|
||||
return result
|
||||
|
||||
|
||||
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
|
||||
sub_query = []
|
||||
if data.message is not None:
|
||||
sub_query.append("message = %(message)s")
|
||||
if data.tag is not None and len(data.tag) > 0:
|
||||
sub_query.append("tag = %(tag)s")
|
||||
if data.is_public is not None:
|
||||
sub_query.append("is_public = %(is_public)s")
|
||||
if data.timestamp is not None:
|
||||
sub_query.append("timestamp = %(timestamp)s")
|
||||
|
||||
sub_query.append("updated_at = timezone('utc'::text, now())")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.sessions_notes
|
||||
SET
|
||||
{" ,".join(sub_query)}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(user_id)s
|
||||
AND note_id = %(note_id)s
|
||||
AND deleted_at ISNULL
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
|
||||
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
|
||||
)
|
||||
row = helper.dict_to_camel_case(cur.fetchone())
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return row
|
||||
return {"errors": ["Note not found"]}
|
||||
|
||||
|
||||
def delete(project_id, note_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" UPDATE public.sessions_notes
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE note_id = %(note_id)s
|
||||
AND project_id = %(project_id)s
|
||||
AND deleted_at ISNULL;""",
|
||||
{"project_id": project_id, "note_id": note_id})
|
||||
)
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
|
||||
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
|
||||
if note is None:
|
||||
return {"errors": ["Note not found"]}
|
||||
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
|
||||
if note["timestamp"] > 0:
|
||||
session_url += f"&jumpto={note['timestamp']}"
|
||||
title = f"<{session_url}|Note for session {note['sessionId']}>"
|
||||
|
||||
blocks = [{"type": "section",
|
||||
"fields": [{"type": "mrkdwn",
|
||||
"text": title}]},
|
||||
{"type": "section",
|
||||
"fields": [{"type": "plain_text",
|
||||
"text": note["message"]}]}]
|
||||
if note["tag"]:
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": f"Tag: *{note['tag']}*"}]})
|
||||
bottom = f"Created by {note['userName'].capitalize()}"
|
||||
if user_id != note["userId"]:
|
||||
bottom += f"\nSent by {note['shareName']}: "
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": bottom}]})
|
||||
return Slack.send_raw(
|
||||
tenant_id=tenant_id,
|
||||
webhook_id=webhook_id,
|
||||
body={"blocks": blocks}
|
||||
)
|
||||
|
||||
|
||||
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
|
||||
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
|
||||
if note is None:
|
||||
return {"errors": ["Note not found"]}
|
||||
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
|
||||
if note["timestamp"] > 0:
|
||||
session_url += f"&jumpto={note['timestamp']}"
|
||||
title = f"[Note for session {note['sessionId']}]({session_url})"
|
||||
|
||||
blocks = [{
|
||||
"type": "TextBlock",
|
||||
"text": title,
|
||||
"style": "heading",
|
||||
"size": "Large"
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"spacing": "Small",
|
||||
"text": note["message"]
|
||||
}
|
||||
]
|
||||
if note["tag"]:
|
||||
blocks.append({"type": "TextBlock",
|
||||
"spacing": "Small",
|
||||
"text": f"Tag: *{note['tag']}*",
|
||||
"size": "Small"})
|
||||
bottom = f"Created by {note['userName'].capitalize()}"
|
||||
if user_id != note["userId"]:
|
||||
bottom += f"\nSent by {note['shareName']}: "
|
||||
blocks.append({"type": "TextBlock",
|
||||
"spacing": "Default",
|
||||
"text": bottom,
|
||||
"size": "Small",
|
||||
"fontType": "Monospace"})
|
||||
return MSTeams.send_raw(
|
||||
tenant_id=tenant_id,
|
||||
webhook_id=webhook_id,
|
||||
body={"type": "message",
|
||||
"attachments": [
|
||||
{"contentType": "application/vnd.microsoft.card.adaptive",
|
||||
"contentUrl": None,
|
||||
"content": {
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type": "AdaptiveCard",
|
||||
"version": "1.5",
|
||||
"body": [{
|
||||
"type": "ColumnSet",
|
||||
"style": "emphasis",
|
||||
"separator": True,
|
||||
"bleed": True,
|
||||
"columns": [{"width": "stretch",
|
||||
"items": blocks,
|
||||
"type": "Column"}]
|
||||
}]}}
|
||||
]})
|
||||
|
|
@ -143,12 +143,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.LOCATION:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": e.filters
|
||||
"filters": []
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -160,12 +160,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.REQUEST_DETAILS:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": e.filters
|
||||
"filters": []
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -273,7 +273,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
return sessions
|
||||
|
||||
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
|
||||
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
|
||||
schemas.EventType.GRAPHQL] \
|
||||
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
|
||||
|
|
@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
|
||||
c.value, value_key=e_k))
|
||||
else:
|
||||
logger.warning(f"unsupported extra_event type: {c.type}")
|
||||
logger.warning(f"unsupported extra_event type:${c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
|
||||
query_part = f"""\
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import ast
|
||||
import json
|
||||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import metadata, projects
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.utils import helper, ch_client, exp_ch_helper
|
||||
from . import sessions_favorite, sessions_search_legacy, sessions_ch as sessions, sessions_legacy_mobil
|
||||
from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -57,11 +57,15 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
|
|||
"""
|
||||
|
||||
|
||||
def __parse_metadata(metadata_map):
|
||||
return json.loads(metadata_map.replace("'", '"').replace("NULL", 'null'))
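# Rough example (illustrative only) of what the helper above does, with a hypothetical
# metadata map string coming back from ClickHouse:
#   __parse_metadata("{'plan': 'pro', 'region': NULL}")  ->  {"plan": "pro", "region": None}
# Note: the naive replace() calls assume values contain neither single quotes nor the
# literal word NULL.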
|
||||
|
||||
|
||||
# This function executes the query and return result
|
||||
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
|
||||
user_id, errors_only=False,
|
||||
error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
|
||||
platform="web"):
|
||||
user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
|
||||
count_only=False, issue=None, ids_only=False):
|
||||
platform = project.platform
|
||||
if data.bookmarked:
|
||||
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
|
||||
if data.startTimestamp is None:
|
||||
|
|
@ -69,7 +73,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
return {
|
||||
'total': 0,
|
||||
'sessions': [],
|
||||
'src': 2
|
||||
'_src': 2
|
||||
}
|
||||
if project.platform == "web":
|
||||
full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=error_status,
|
||||
|
|
@ -123,7 +127,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
|
||||
meta_keys = metadata.get(project_id=project.project_id)
|
||||
meta_map = ",map(%s) AS 'metadata'" \
|
||||
% ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])
|
||||
% ','.join(
|
||||
[f"'{m['key']}',coalesce(metadata_{m['index']},CAST(NULL AS Nullable(String)))" for m in meta_keys])
|
||||
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
|
||||
COALESCE(JSONB_AGG(users_sessions)
|
||||
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
|
||||
|
|
@ -158,7 +163,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
|
||||
meta_keys = metadata.get(project_id=project.project_id)
|
||||
meta_map = ",'metadata',toString(map(%s))" \
|
||||
% ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])
|
||||
% ','.join(
|
||||
[f"'{m['key']}',coalesce(metadata_{m['index']},CAST(NULL AS Nullable(String)))" for m in meta_keys])
|
||||
main_query = cur.format(query=f"""SELECT any(total) AS count,
|
||||
groupArray(%(sessions_limit)s)(details) AS sessions
|
||||
FROM (SELECT total, details
|
||||
|
|
@ -175,11 +181,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
ORDER BY sort_key {data.order}
|
||||
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
|
||||
parameters=full_args)
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
|
||||
try:
|
||||
logging.debug("--------------------")
|
||||
sessions_list = cur.execute(main_query)
|
||||
logging.debug("--------------------")
|
||||
except Exception as err:
|
||||
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(main_query)
|
||||
|
|
@ -200,83 +206,24 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
for i, s in enumerate(sessions_list):
|
||||
sessions_list[i] = {**s.pop("last_session")[0], **s}
|
||||
sessions_list[i].pop("rn")
|
||||
sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
|
||||
sessions_list[i]["metadata"] = __parse_metadata(sessions_list[i]["metadata"])
|
||||
else:
|
||||
import json
|
||||
for i in range(len(sessions_list)):
|
||||
sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
|
||||
sessions_list[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions_list[i]))
|
||||
sessions_list[i]["metadata"] = __parse_metadata(sessions_list[i]["metadata"])
|
||||
sessions_list[i] = schemas.SessionModel.model_validate(helper.dict_to_camel_case(sessions_list[i]))
|
||||
|
||||
return {
|
||||
'total': total,
|
||||
'sessions': sessions_list,
|
||||
'src': 2
|
||||
'_src': 2
|
||||
}
|
||||
|
||||
|
||||
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
|
||||
if project_id is None:
|
||||
all_projects = projects.get_projects(tenant_id=tenant_id)
|
||||
else:
|
||||
all_projects = [
|
||||
projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,
|
||||
include_gdpr=False)]
|
||||
|
||||
all_projects = {int(p["projectId"]): p["name"] for p in all_projects}
|
||||
project_ids = list(all_projects.keys())
|
||||
|
||||
available_keys = metadata.get_keys_by_projects(project_ids)
|
||||
for i in available_keys:
|
||||
available_keys[i]["user_id"] = schemas.FilterType.USER_ID
|
||||
available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID
|
||||
results = {}
|
||||
for i in project_ids:
|
||||
if m_key not in available_keys[i].values():
|
||||
available_keys.pop(i)
|
||||
results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
|
||||
project_ids = list(available_keys.keys())
|
||||
if len(project_ids) > 0:
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_queries = []
|
||||
for i in project_ids:
|
||||
col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
|
||||
sub_queries.append(cur.mogrify(
|
||||
f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
|
||||
{"id": i, "value": m_value}).decode('UTF-8'))
|
||||
query = f"""SELECT {", ".join(sub_queries)};"""
|
||||
cur.execute(query=query)
|
||||
|
||||
rows = cur.fetchone()
|
||||
|
||||
sub_queries = []
|
||||
for i in rows.keys():
|
||||
results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]}
|
||||
if rows[i] > 0:
|
||||
col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)]
|
||||
sub_queries.append(
|
||||
cur.mogrify(
|
||||
f"""(
|
||||
SELECT *
|
||||
FROM (
|
||||
SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS_CH}
|
||||
FROM public.sessions AS s LEFT JOIN (SELECT session_id
|
||||
FROM public.user_favorite_sessions
|
||||
WHERE user_favorite_sessions.user_id = %(userId)s
|
||||
) AS favorite_sessions USING (session_id)
|
||||
WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
|
||||
) AS full_sessions
|
||||
ORDER BY favorite DESC, issue_score DESC
|
||||
LIMIT 10
|
||||
)""",
|
||||
{"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
|
||||
if len(sub_queries) > 0:
|
||||
cur.execute("\nUNION\n".join(sub_queries))
|
||||
rows = cur.fetchall()
|
||||
for i in rows:
|
||||
results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
|
||||
return results
|
||||
return sessions_search_legacy.search_by_metadata(tenant_id, user_id, m_key, m_value, project_id)
|
||||
|
||||
|
||||
# TODO: rewrite this function to use ClickHouse
|
||||
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
|
||||
ascending: bool = False) -> dict:
|
||||
return sessions_search_legacy.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)
|
||||
|
|
@ -40,7 +40,8 @@ COALESCE((SELECT TRUE
|
|||
# This function executes the query and return result
|
||||
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
|
||||
user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
|
||||
count_only=False, issue=None, ids_only=False, platform="web"):
|
||||
count_only=False, issue=None, ids_only=False):
|
||||
platform = project.platform
|
||||
if data.bookmarked:
|
||||
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
|
||||
if data.startTimestamp is None:
|
||||
|
|
@ -48,7 +49,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
return {
|
||||
'total': 0,
|
||||
'sessions': [],
|
||||
'src': 1
|
||||
'_src': 1
|
||||
}
|
||||
full_args, query_part = sessions_legacy.search_query_parts(data=data, error_status=error_status,
|
||||
errors_only=errors_only,
|
||||
|
|
@ -176,7 +177,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
return {
|
||||
'total': total,
|
||||
'sessions': helper.list_to_camel_case(sessions),
|
||||
'src': 1
|
||||
'_src': 1
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -239,6 +240,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
|
|||
cur.execute("\nUNION\n".join(sub_queries))
|
||||
rows = cur.fetchall()
|
||||
for i in rows:
|
||||
i["_src"] = 1
|
||||
results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
|
||||
return results
|
||||
|
||||
|
|
@ -246,7 +248,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
|
|||
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
|
||||
ascending: bool = False) -> dict:
|
||||
if session_ids is None or len(session_ids) == 0:
|
||||
return {"total": 0, "sessions": []}
|
||||
return {"total": 0, "sessions": [], "_src": 1}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
params = {"project_id": project_id, "session_ids": tuple(session_ids)}
|
||||
|
|
@ -265,4 +267,4 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
|
|||
s["metadata"] = {}
|
||||
for m in meta_keys:
|
||||
s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
|
||||
return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
|
||||
return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "_src": 1}
|
||||
|
|
@ -11,9 +11,3 @@ if smtp.has_smtp():
|
|||
logger.info("valid SMTP configuration found")
|
||||
else:
|
||||
logger.info("no SMTP configuration found or SMTP validation failed")
|
||||
|
||||
if config("EXP_CH_DRIVER", cast=bool, default=True):
|
||||
logging.info(">>> Using new CH driver")
|
||||
from . import ch_client_exp as ch_client
|
||||
else:
|
||||
from . import ch_client
|
||||
|
|
|
|||
|
|
@ -1,73 +1,185 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_driver
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
if kwargs.get("parameters"):
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
|
||||
else:
|
||||
logger.debug(
|
||||
str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
elif len(args) > 0:
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(args[0])
|
||||
else:
|
||||
logger.debug(str.encode(args[0]))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
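# Minimal usage sketch (illustrative only; in this module the pool is consumed through
# make_pool() and ClickHouseClient below):
#   pool = ClickHouseConnectionPool(min_size=2, max_size=4)
#   conn = pool.get_connection()
#   try:
#       conn.query("SELECT 1")          # clickhouse_connect client
#   finally:
#       pool.release_connection(conn)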
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
self.__client = clickhouse_driver.Client(host=config("ch_host"),
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
user=config("ch_user", default="default"),
|
||||
password=config("ch_password", default=""),
|
||||
port=config("ch_port", cast=int),
|
||||
settings=settings,
|
||||
**extra_args) \
|
||||
if self.__client is None else self.__client
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def execute(self, query, parameters=None, **args):
|
||||
try:
|
||||
results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
|
||||
keys = tuple(x for x, y in results[1])
|
||||
return [dict(zip(keys, i)) for i in results[0]]
|
||||
except Exception as err:
|
||||
logger.error("--------- CH EXCEPTION -----------", exc_info=err)
|
||||
logger.error("--------- CH QUERY EXCEPTION -----------")
|
||||
logger.error(self.format(query=query, parameters=parameters)
|
||||
.replace('\n', '\\n')
|
||||
.replace(' ', ' ')
|
||||
.replace(' ', ' '))
|
||||
logger.error("--------------------")
|
||||
raise err
|
||||
|
||||
def insert(self, query, params=None, **args):
|
||||
return self.__client.execute(query=query, params=params, **args)
|
||||
|
||||
def client(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, parameters):
|
||||
if parameters is None:
|
||||
return query
|
||||
return self.__client.substitute_params(query, parameters, self.__client.connection.context)
|
||||
def format(self, query, parameters=None):
|
||||
if parameters:
|
||||
ctx = QueryContext(query=query, parameters=parameters)
|
||||
return ctx.final_query
|
||||
return query
|
||||
|
||||
def __exit__(self, *args):
|
||||
pass
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">CH_POOL:not defined")
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
|
||||
|
||||
async def terminate():
|
||||
pass
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
|
|
|
|||
|
|
@ -1,178 +0,0 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
if kwargs.get("parameters"):
|
||||
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
elif len(args) > 0:
|
||||
logger.debug(str.encode(args[0]))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
|
||||
def __enter__(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, parameters=None):
|
||||
if parameters:
|
||||
ctx = QueryContext(query=query, parameters=parameters)
|
||||
return ctx.final_query
|
||||
return query
|
||||
|
||||
def __exit__(self, *args):
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
|
||||
|
||||
async def terminate():
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
|
|
@ -1,7 +1,13 @@
|
|||
from typing import Union
|
||||
import logging
|
||||
import re
|
||||
from typing import Union, Any
|
||||
|
||||
import schemas
|
||||
import logging
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from schemas import SearchEventOperator
|
||||
import math
|
||||
import struct
|
||||
from decimal import Decimal
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -66,3 +72,162 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
|
|||
if event_type not in defs:
|
||||
raise Exception(f"unsupported EventType:{event_type}")
|
||||
return defs.get(event_type)
|
||||
|
||||
|
||||
# AI generated
|
||||
def simplify_clickhouse_type(ch_type: str) -> str:
|
||||
"""
|
||||
Simplify a ClickHouse data type name to a broader category like:
|
||||
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
|
||||
"""
|
||||
|
||||
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
|
||||
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
|
||||
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
|
||||
while True:
|
||||
match = pattern_wrappers.match(ch_type)
|
||||
if match:
|
||||
ch_type = match.group(2)
|
||||
else:
|
||||
break
|
||||
|
||||
# 2) Normalize (lowercase) for easier checks
|
||||
normalized_type = ch_type.lower()
|
||||
|
||||
# 3) Use pattern matching or direct checks for known categories
|
||||
# (You can adapt this as you see fit for your environment.)
|
||||
|
||||
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
|
||||
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
|
||||
return "int"
|
||||
|
||||
# Floats: Float32, Float64
|
||||
    if re.match(r'^(float(32|64)|double)$', normalized_type):  # group the alternation so ^ and $ bind to both branches
|
||||
return "float"
|
||||
|
||||
# Decimal: Decimal(P, S)
|
||||
if normalized_type.startswith("decimal"):
|
||||
# return "decimal"
|
||||
return "float"
|
||||
|
||||
    # Date/DateTime ("datetime..." also starts with "date", so a single check covers both)
    if normalized_type.startswith("date"):
        return "datetime"
|
||||
|
||||
# Strings: String, FixedString(N)
|
||||
if normalized_type.startswith("string"):
|
||||
return "string"
|
||||
if normalized_type.startswith("fixedstring"):
|
||||
return "string"
|
||||
|
||||
# UUID
|
||||
if normalized_type.startswith("uuid"):
|
||||
# return "uuid"
|
||||
return "string"
|
||||
|
||||
# Enums: Enum8(...) or Enum16(...)
|
||||
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
|
||||
# return "enum"
|
||||
return "string"
|
||||
|
||||
# Arrays: Array(T)
|
||||
if normalized_type.startswith("array"):
|
||||
return "array"
|
||||
|
||||
# Tuples: Tuple(T1, T2, ...)
|
||||
if normalized_type.startswith("tuple"):
|
||||
return "tuple"
|
||||
|
||||
# Map(K, V)
|
||||
if normalized_type.startswith("map"):
|
||||
return "map"
|
||||
|
||||
# Nested(...)
|
||||
if normalized_type.startswith("nested"):
|
||||
return "nested"
|
||||
|
||||
# If we didn't match above, just return the original type in lowercase
|
||||
return normalized_type
|
||||
|
||||
|
||||
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
|
||||
"""
|
||||
Takes a list of ClickHouse types and returns a list of simplified types
|
||||
by calling `simplify_clickhouse_type` on each.
|
||||
"""
|
||||
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
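# Examples of the simplification (illustrative only):
#   simplify_clickhouse_type("LowCardinality(Nullable(Int32))")  -> "int"
#   simplify_clickhouse_types(["UInt8", "Int64", "Decimal(10, 2)"])  -> ["int", "float"]  (order not guaranteed)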
|
||||
|
||||
|
||||
def get_sub_condition(col_name: str, val_name: str,
|
||||
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str:
|
||||
if operator == SearchEventOperator.PATTERN:
|
||||
return f"match({col_name}, %({val_name})s)"
|
||||
op = sh.get_sql_operator(operator)
|
||||
return f"{col_name} {op} %({val_name})s"
|
||||
|
||||
|
||||
def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str:
|
||||
if value is None or len(value) == 0:
|
||||
return ""
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT):
|
||||
return best_clickhouse_type(value)
|
||||
return data_type.capitalize()
|
||||
|
||||
|
||||
# (type_name, minimum, maximum) – ordered by increasing size
|
||||
_INT_RANGES = [
|
||||
("Int8", -128, 127),
|
||||
("UInt8", 0, 255),
|
||||
("Int16", -32_768, 32_767),
|
||||
("UInt16", 0, 65_535),
|
||||
("Int32", -2_147_483_648, 2_147_483_647),
|
||||
("UInt32", 0, 4_294_967_295),
|
||||
("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807),
|
||||
("UInt64", 0, 18_446_744_073_709_551_615),
|
||||
]
|
||||
|
||||
|
||||
def best_clickhouse_type(value):
|
||||
"""
|
||||
    Return the most compact ClickHouse numeric type that can store *value* losslessly.
|
||||
|
||||
"""
|
||||
# Treat bool like tiny int
|
||||
if isinstance(value, bool):
|
||||
value = int(value)
|
||||
|
||||
# --- Integers ---
|
||||
if isinstance(value, int):
|
||||
for name, lo, hi in _INT_RANGES:
|
||||
if lo <= value <= hi:
|
||||
return name
|
||||
# Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal
|
||||
return "Int128"
|
||||
|
||||
# --- Decimal.Decimal (exact) ---
|
||||
if isinstance(value, Decimal):
|
||||
# ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits.
|
||||
digits = len(value.as_tuple().digits)
|
||||
if digits <= 9:
|
||||
return "Decimal32"
|
||||
elif digits <= 18:
|
||||
return "Decimal64"
|
||||
else:
|
||||
return "Decimal128"
|
||||
|
||||
# --- Floats ---
|
||||
if isinstance(value, float):
|
||||
if not math.isfinite(value):
|
||||
return "Float64" # inf / nan → always Float64
|
||||
|
||||
# Check if a round-trip through 32-bit float preserves the bit pattern
|
||||
packed = struct.pack("f", value)
|
||||
if struct.unpack("f", packed)[0] == value:
|
||||
return "Float32"
|
||||
return "Float64"
|
||||
|
||||
raise TypeError(f"Unsupported type: {type(value).__name__}")
|
||||
|
|
|
|||
|
|
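A minimal usage sketch of the helpers above (illustrative only; it assumes the module-level imports of math, struct and Decimal that the function relies on, and the expected results follow from _INT_RANGES and the Decimal/float branches):

from decimal import Decimal

best_clickhouse_type(True)               # "Int8"      - bool is first coerced to int
best_clickhouse_type(200)                # "UInt8"     - fits 0..255 before any wider type
best_clickhouse_type(300)                # "Int16"     - too big for UInt8
best_clickhouse_type(1.5)                # "Float32"   - survives the 32-bit round-trip
best_clickhouse_type(0.1)                # "Float64"   - 0.1 is not exact in 32 bits
best_clickhouse_type(Decimal("123.45"))  # "Decimal32" - 5 significant digits <= 9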
@ -99,6 +99,8 @@ def allow_captcha():

def string_to_sql_like(value):
    if value is None:
        return None
    value = re.sub(' +', ' ', value)
    value = value.replace("*", "%")
    if value.startswith("^"):

@ -334,5 +336,3 @@ def cast_session_id_to_string(data):
    for key in keys:
        data[key] = cast_session_id_to_string(data[key])
    return data
@ -4,41 +4,40 @@ import schemas

def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
    if isinstance(op, Enum):
        op = op.value
    return {
        schemas.SearchEventOperator.IS.value: "=",
        schemas.SearchEventOperator.ON.value: "=",
        schemas.SearchEventOperator.ON_ANY.value: "IN",
        schemas.SearchEventOperator.IS_NOT.value: "!=",
        schemas.SearchEventOperator.NOT_ON.value: "!=",
        schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
        schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
        schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
        schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
        # Selector operators:
        schemas.ClickEventExtraOperator.IS.value: "=",
        schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
        schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
        schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
        schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
        schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
        schemas.SearchEventOperator.IS: "=",
        schemas.SearchEventOperator.ON: "=",
        schemas.SearchEventOperator.ON_ANY: "IN",
        schemas.SearchEventOperator.IS_NOT: "!=",
        schemas.SearchEventOperator.NOT_ON: "!=",
        schemas.SearchEventOperator.CONTAINS: "ILIKE",
        schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
        schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
        schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
        # this is not used as an operator, it is used in order to maintain a valid value for conditions
        schemas.SearchEventOperator.PATTERN: "regex",

        schemas.MathOperator.GREATER.value: ">",
        schemas.MathOperator.GREATER_EQ.value: ">=",
        schemas.MathOperator.LESS.value: "<",
        schemas.MathOperator.LESS_EQ.value: "<=",
        # Selector operators:
        schemas.ClickEventExtraOperator.IS: "=",
        schemas.ClickEventExtraOperator.IS_NOT: "!=",
        schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
        schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
        schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
        schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",

        schemas.MathOperator.GREATER: ">",
        schemas.MathOperator.GREATER_EQ: ">=",
        schemas.MathOperator.LESS: "<",
        schemas.MathOperator.LESS_EQ: "<=",
    }.get(op, "=")


def is_negation_operator(op: schemas.SearchEventOperator):
    if isinstance(op, Enum):
        op = op.value
    return op in [schemas.SearchEventOperator.IS_NOT.value,
                  schemas.SearchEventOperator.NOT_ON.value,
                  schemas.SearchEventOperator.NOT_CONTAINS.value,
                  schemas.ClickEventExtraOperator.IS_NOT.value,
                  schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
    return op in [schemas.SearchEventOperator.IS_NOT,
                  schemas.SearchEventOperator.NOT_ON,
                  schemas.SearchEventOperator.NOT_CONTAINS,
                  schemas.ClickEventExtraOperator.IS_NOT,
                  schemas.ClickEventExtraOperator.NOT_CONTAINS]


def reverse_sql_operator(op):
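A quick illustrative sketch of how the enum-keyed mapping above resolves an operator when a condition string is built (the column and placeholder names here are hypothetical):

op = get_sql_operator(schemas.SearchEventOperator.CONTAINS)                # "ILIKE"
negated = is_negation_operator(schemas.SearchEventOperator.NOT_CONTAINS)   # True
condition = f"user_browser {op} %(browser_value)s"                         # "user_browser ILIKE %(browser_value)s"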
@ -53,12 +52,16 @@ def multi_conditions(condition, values, value_key="value", is_not=False):
    return "(" + (" AND " if is_not else " OR ").join(query) + ")"


def multi_values(values, value_key="value"):
def multi_values(values, value_key="value", data_type: schemas.PropertyType | None = None):
    query_values = {}
    if values is not None and isinstance(values, list):
        for i in range(len(values)):
            k = f"{value_key}_{i}"
            query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i]
            if data_type:
                if data_type == schemas.PropertyType.STRING:
                    query_values[k] = str(query_values[k])
    return query_values


@ -76,4 +79,3 @@ def single_value(values):
        if isinstance(v, Enum):
            values[i] = v.value
    return values
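A small sketch of the new data_type argument on multi_values (the key name "browser" is just an example; the expected output follows directly from the loop above):

params = multi_values(["chrome", 1], value_key="browser", data_type=schemas.PropertyType.STRING)
# -> {"browser_0": "chrome", "browser_1": "1"}   # PropertyType.STRING coerces every value to str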
@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_METRICS=true
EXP_SESSIONS_SEARCH=true
@ -1,17 +1,16 @@
urllib3==2.3.0
urllib3==2.4.0
requests==2.32.3
boto3==1.36.12
boto3==1.38.10
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
psycopg[pool,binary]==3.2.7
clickhouse-connect==0.8.17
elasticsearch==9.0.1
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
uvicorn[standard]==0.34.0
fastapi==0.115.12
uvicorn[standard]==0.34.2
python-decouple==3.8
pydantic[email]==2.10.6
pydantic[email]==2.11.4
apscheduler==3.11.0

@ -1,19 +1,18 @@
urllib3==2.3.0
urllib3==2.4.0
requests==2.32.3
boto3==1.36.12
boto3==1.38.10
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
psycopg[pool,binary]==3.2.7
clickhouse-connect==0.8.17
elasticsearch==9.0.1
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
uvicorn[standard]==0.34.0
fastapi==0.115.12
uvicorn[standard]==0.34.2
python-decouple==3.8
pydantic[email]==2.10.6
pydantic[email]==2.11.4
apscheduler==3.11.0

redis==5.2.1
redis==6.0.0
@ -14,7 +14,7 @@ from chalicelib.core import webhook
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.core.errors import errors, errors_details
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
from chalicelib.core.sessions import sessions, sessions_replay, sessions_favorite, sessions_viewed, \
    sessions_assignments, unprocessed_sessions, sessions_search
from chalicelib.utils import captcha, smtp
from chalicelib.utils import contextual_validators

@ -259,8 +259,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
        Depends(contextual_validators.validate_contextual_payload),
                    context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id,
                                           platform=context.project.platform)
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id)
    return {'data': data}

@ -268,8 +267,7 @@ def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema =
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
        Depends(contextual_validators.validate_contextual_payload),
                       context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True,
                                           platform=context.project.platform)
    data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True)
    return {'data': data}

@ -219,6 +219,17 @@ def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSch
    return {"data": data}


@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}/chart", tags=["card"])
@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}", tags=["card"])
def get_card_chart_for_dashboard(projectId: int, dashboardId: int, metric_id: int,
                                 data: schemas.CardSessionsSchema = Body(...),
                                 context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.make_chart_from_card(
        project=context.project, user_id=context.user_id, metric_id=metric_id, data=data, for_dashboard=True
    )
    return {"data": data}


@app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"])
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
77
api/routers/subs/product_analytics.py
Normal file
@ -0,0 +1,77 @@
from typing import Annotated

from fastapi import Body, Depends, Query

import schemas
from chalicelib.core import metadata
from chalicelib.core.product_analytics import events, properties, autocomplete
from or_dependencies import OR_context
from routers.base import get_routers
from typing import Optional

public_app, app, app_apikey = get_routers()


@app.get('/{projectId}/filters', tags=["product_analytics"])
def get_all_filters(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                    context: schemas.CurrentContext = Depends(OR_context)):
    return {
        "data": {
            "events": events.get_events(project_id=projectId, page=filter_query),
            "filters": properties.get_all_properties(project_id=projectId, page=filter_query),
            "metadata": metadata.get_for_filters(project_id=projectId)
        }
    }


@app.get('/{projectId}/events/names', tags=["product_analytics"])
def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                   context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.get_events(project_id=projectId, page=filter_query)}


@app.get('/{projectId}/properties/search', tags=["product_analytics"])
def get_event_properties(projectId: int, event_name: str = None,
                         context: schemas.CurrentContext = Depends(OR_context)):
    if not event_name or len(event_name) == 0:
        return {"data": []}
    return {"data": properties.get_event_properties(project_id=projectId, event_name=event_name)}


@app.post('/{projectId}/events/search', tags=["product_analytics"])
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.search_events(project_id=projectId, data=data)}


@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                           context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}


@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                               context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}


@app.get('/{projectId}/events/autocomplete', tags=["autocomplete"])
def autocomplete_events(projectId: int, q: Optional[str] = None,
                        context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": autocomplete.search_events(project_id=projectId, q=None if not q or len(q) == 0 else q)}


@app.get('/{projectId}/properties/autocomplete', tags=["autocomplete"])
def autocomplete_properties(projectId: int, propertyName: Optional[str] = None, eventName: Optional[str] = None,
                            q: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)):
    if not propertyName and not eventName and not q:
        return {"error": ["Specify eventName to get top properties",
                          "Specify propertyName to get top values of that property",
                          "Specify eventName&propertyName to get top values of that property for the selected event"]}
    return {"data": autocomplete.search_properties(project_id=projectId,
                                                   event_name=None if not eventName \
                                                       or len(eventName) == 0 else eventName,
                                                   property_name=None if not propertyName \
                                                       or len(propertyName) == 0 else propertyName,
                                                   q=None if not q or len(q) == 0 else q)}
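An illustrative client-side sketch of the properties autocomplete endpoint above (the host, project id, and auth header are hypothetical placeholders, not values from this change):

import requests

resp = requests.get(
    "https://openreplay.example.com/api/1234/properties/autocomplete",  # hypothetical base URL / project id
    params={"eventName": "purchase", "propertyName": "plan", "q": "pro"},
    headers={"Authorization": "Bearer <JWT>"},
)
print(resp.json().get("data"))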
@ -1,15 +0,0 @@
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers


public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
                  # data: schemas.CreateDashboardSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return product_anaytics2.search_events(project_id=projectId, data={})
@ -1,10 +1,12 @@
from fastapi import Body, Depends
from typing import Annotated

from fastapi import Body, Depends, Query

import schemas
from chalicelib.core.usability_testing import service
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
from or_dependencies import OR_context
from routers.base import get_routers
from schemas import schemas

public_app, app, app_apikey = get_routers()
tags = ["usability-tests"]

@ -77,9 +79,7 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate

@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
                       live: bool = False,
                       user_id: str = None):
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.UsabilityTestQuery, Query()]):
    """
    Get sessions related to a specific UT test.

@ -87,21 +87,23 @@ async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int =
    - **test_id**: The unique identifier of the UT test.
    """

    if live:
        return service.ut_tests_sessions_live(projectId, test_id, page, limit)
    if filter_query.live:
        return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
    else:
        return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
        return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit,
                                         filter_query.user_id, filter_query.live)


@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
async def get_responses(projectId: int, test_id: int, task_id: int,
                        filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
    """
    Get responses related to a specific UT test.

    - **project_id**: The unique identifier of the project.
    - **test_id**: The unique identifier of the UT test.
    """
    return service.get_responses(test_id, task_id, page, limit, query)
    return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query)


@app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)
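A small sketch of the query model the rewritten get_sessions endpoint now binds through Query() (UsabilityTestQuery is declared further down in this change; the concrete values here are illustrative):

# ?page=2&limit=50&live=true&user_id=u42 would bind to:
q = schemas.UsabilityTestQuery(page=2, limit=50, live=True, user_id="u42")
# the handler then forwards q.page, q.limit, q.user_id and q.live to the service layer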
@ -1,2 +1,4 @@
from .schemas import *
from .product_analytics import *
from . import overrides as _overrides
from .schemas import _PaginatedSchema as PaginatedSchema
22
api/schemas/product_analytics.py
Normal file
@ -0,0 +1,22 @@
from typing import Optional, List, Literal, Union, Annotated
from pydantic import Field

from .overrides import BaseModel
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
    _PaginatedSchema, PropertyFilterSchema


class EventSearchSchema(BaseModel):
    is_event: Literal[True] = True
    name: str = Field(...)
    properties: Optional[EventPropertiesSchema] = Field(default=None)


ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
    Field(discriminator='is_event')]


class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    filters: List[ProductAnalyticsGroupedFilter] = Field(...)
    sort: str = Field(default="startTs")
    order: SortOrderType = Field(default=SortOrderType.DESC)
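A hedged sketch of how the filters list of the new payload could be built (field names as declared above and in the main schemas module; the concrete values are illustrative):

filters = [
    EventSearchSchema(name="purchase"),
    PropertyFilterSchema(name="$browser", operator=SearchEventOperator.IS, value=["Chrome"]),
]
# these would populate EventsSearchPayloadSchema.filters, with is_event acting as the discriminator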
@ -3,12 +3,13 @@ from typing import Optional, List, Union, Literal

from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl
from pydantic import field_validator, model_validator, computed_field
from pydantic import AfterValidator
from pydantic.functional_validators import BeforeValidator

from chalicelib.utils.TimeUTC import TimeUTC
from .overrides import BaseModel, Enum, ORUnion
from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
    force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
    force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric, check_regex


class _GRecaptcha(BaseModel):

@ -404,6 +405,7 @@ class EventType(str, Enum):
    REQUEST_MOBILE = "requestMobile"
    ERROR_MOBILE = "errorMobile"
    SWIPE_MOBILE = "swipeMobile"
    EVENT = "event"


class PerformanceEventType(str, Enum):

@ -464,6 +466,7 @@ class SearchEventOperator(str, Enum):
    NOT_CONTAINS = "notContains"
    STARTS_WITH = "startsWith"
    ENDS_WITH = "endsWith"
    PATTERN = "regex"


class ClickEventExtraOperator(str, Enum):

@ -535,7 +538,7 @@ class GraphqlFilterType(str, Enum):
class RequestGraphqlFilterSchema(BaseModel):
    type: Union[FetchFilterType, GraphqlFilterType] = Field(...)
    value: List[Union[int, str]] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)
    operator: Annotated[Union[SearchEventOperator, MathOperator], AfterValidator(check_regex)] = Field(...)

    @model_validator(mode="before")
    @classmethod
@ -545,7 +548,85 @@ class RequestGraphqlFilterSchema(BaseModel):
        return values


class SessionSearchEventSchema2(BaseModel):
class EventPredefinedPropertyType(str, Enum):
    TIME = "$time"
    SOURCE = "$source"
    DURATION_S = "$duration_s"
    DESCRIPTION = "description"
    AUTO_CAPTURED = "$auto_captured"
    SDK_EDITION = "$sdk_edition"
    SDK_VERSION = "$sdk_version"
    DEVICE_ID = "$device_id"
    OS = "$os"
    OS_VERSION = "$os_version"
    BROWSER = "$browser"
    BROWSER_VERSION = "$browser_version"
    DEVICE = "$device"
    SCREEN_HEIGHT = "$screen_height"
    SCREEN_WIDTH = "$screen_width"
    CURRENT_URL = "$current_url"
    INITIAL_REFERRER = "$initial_referrer"
    REFERRING_DOMAIN = "$referring_domain"
    REFERRER = "$referrer"
    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
    SEARCH_ENGINE = "$search_engine"
    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
    UTM_SOURCE = "utm_source"
    UTM_MEDIUM = "utm_medium"
    UTM_CAMPAIGN = "utm_campaign"
    COUNTRY = "$country"
    STATE = "$state"
    CITY = "$city"
    ISSUE_TYPE = "issue_type"
    TAGS = "$tags"
    IMPORT = "$import"


class PropertyType(str, Enum):
    INT = "int"
    FLOAT = "float"
    DATETIME = "datetime"
    STRING = "string"
    ARRAY = "array"
    TUPLE = "tuple"
    MAP = "map"
    NESTED = "nested"


class PropertyFilterSchema(BaseModel):
    is_event: Literal[False] = False
    name: Union[EventPredefinedPropertyType, str] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)
    value: List[Union[int, str]] = Field(...)
    data_type: PropertyType = Field(default=PropertyType.STRING.value)

    # property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)

    @computed_field
    @property
    def is_predefined(self) -> bool:
        return EventPredefinedPropertyType.has_value(self.name)

    @model_validator(mode="after")
    def transform_name(self):
        if isinstance(self.name, Enum):
            self.name = self.name.value
        return self

    @model_validator(mode='after')
    def _check_regex_value(self):
        if self.operator == SearchEventOperator.PATTERN:
            for v in self.value:
                check_regex(v)
        return self


class EventPropertiesSchema(BaseModel):
    operator: Literal["and", "or"] = Field(...)
    filters: List[PropertyFilterSchema] = Field(...)


class SessionSearchEventSchema(BaseModel):
    is_event: Literal[True] = True
    value: List[Union[str, int]] = Field(...)
    type: Union[EventType, PerformanceEventType] = Field(...)
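A short illustration of the is_predefined computed field above (assuming has_value checks enum membership, as it is used elsewhere in these schemas):

PropertyFilterSchema(name="$browser", operator=SearchEventOperator.IS, value=["Chrome"]).is_predefined   # True  - "$browser" is a predefined property
PropertyFilterSchema(name="plan_tier", operator=SearchEventOperator.IS, value=["pro"]).is_predefined     # False - custom property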
@ -553,6 +634,7 @@ class SessionSearchEventSchema2(BaseModel):
|
|||
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
|
||||
sourceOperator: Optional[MathOperator] = Field(default=None)
|
||||
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
|
||||
properties: Optional[EventPropertiesSchema] = Field(default=None)
|
||||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
|
||||
|
|
@ -583,6 +665,13 @@ class SessionSearchEventSchema2(BaseModel):
|
|||
f"operator:{self.operator} is only available for event-type: {EventType.CLICK}"
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class SessionSearchFilterSchema(BaseModel):
|
||||
is_event: Literal[False] = False
|
||||
|
|
@ -640,6 +729,13 @@ class SessionSearchFilterSchema(BaseModel):
|
|||
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _PaginatedSchema(BaseModel):
|
||||
limit: int = Field(default=200, gt=0, le=200)
|
||||
|
|
@ -660,12 +756,12 @@ def add_missing_is_event(values: dict):
|
|||
|
||||
|
||||
# this type is created to allow mixing events&filters and specifying a discriminator
|
||||
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
|
||||
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema],
|
||||
Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]
|
||||
|
||||
|
||||
class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
||||
events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
|
||||
events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True)
|
||||
filters: List[GroupedFilterType] = Field(default_factory=list)
|
||||
sort: str = Field(default="startTs")
|
||||
order: SortOrderType = Field(default=SortOrderType.DESC)
|
||||
|
|
@ -690,6 +786,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
|||
def add_missing_attributes(cls, values):
|
||||
# in case isEvent is wrong:
|
||||
for f in values.get("filters") or []:
|
||||
if f.get("type") is None:
|
||||
continue
|
||||
if EventType.has_value(f["type"]) and not f.get("isEvent"):
|
||||
f["isEvent"] = True
|
||||
elif FilterType.has_value(f["type"]) and f.get("isEvent"):
|
||||
|
|
@ -715,6 +813,15 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
|||
f["value"] = vals
|
||||
return values
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_pa_event_filter(self):
|
||||
for v in self.filters + self.events:
|
||||
if v.type == EventType.EVENT:
|
||||
assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
|
||||
"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
|
||||
assert len(v.value) == 1, "value must have 1 single value for EVENT type"
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def split_filters_events(self):
|
||||
n_filters = []
|
||||
|
|
@ -795,6 +902,13 @@ class PathAnalysisSubFilterSchema(BaseModel):
|
|||
values["isEvent"] = True
|
||||
return values
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _ProductAnalyticsFilter(BaseModel):
|
||||
is_event: Literal[False] = False
|
||||
|
|
@ -805,6 +919,13 @@ class _ProductAnalyticsFilter(BaseModel):
|
|||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _ProductAnalyticsEventFilter(BaseModel):
|
||||
is_event: Literal[True] = True
|
||||
|
|
@ -815,6 +936,13 @@ class _ProductAnalyticsEventFilter(BaseModel):
|
|||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
# this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
|
||||
ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter],
|
||||
|
|
@ -1259,6 +1387,13 @@ class LiveSessionSearchFilterSchema(BaseModel):
|
|||
assert len(self.source) > 0, "source should not be empty for METADATA type"
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
|
||||
filters: List[LiveSessionSearchFilterSchema] = Field([])
|
||||
|
|
@ -1384,7 +1519,7 @@ class MetricSearchSchema(_PaginatedSchema):
|
|||
mine_only: bool = Field(default=False)
|
||||
|
||||
|
||||
class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
|
||||
class _HeatMapSearchEventRaw(SessionSearchEventSchema):
|
||||
type: Literal[EventType.LOCATION] = Field(...)
|
||||
|
||||
|
||||
|
|
@ -1509,3 +1644,34 @@ class TagCreate(TagUpdate):
|
|||
|
||||
class ScopeSchema(BaseModel):
|
||||
scope: int = Field(default=1, ge=1, le=2)
|
||||
|
||||
|
||||
class SessionModel(BaseModel):
|
||||
duration: int
|
||||
errorsCount: int
|
||||
eventsCount: int
|
||||
issueScore: int
|
||||
issueTypes: List[IssueType] = Field(default=[])
|
||||
metadata: dict = Field(default={})
|
||||
pagesCount: int
|
||||
platform: str
|
||||
projectId: int
|
||||
sessionId: str
|
||||
startTs: int
|
||||
timezone: Optional[str]
|
||||
userAnonymousId: Optional[str]
|
||||
userBrowser: str
|
||||
userCity: str
|
||||
userCountry: str
|
||||
userDevice: Optional[str]
|
||||
userDeviceType: str
|
||||
userId: Optional[str]
|
||||
userOs: str
|
||||
userState: str
|
||||
userUuid: str
|
||||
viewed: bool = Field(default=False)
|
||||
|
||||
|
||||
class UsabilityTestQuery(_PaginatedSchema):
|
||||
live: bool = Field(default=False)
|
||||
user_id: Optional[str] = Field(default=None)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
import re
from typing import Union, Any, Type

from pydantic import ValidationInfo

@ -57,3 +58,17 @@ def check_alphanumeric(v: str, info: ValidationInfo) -> str:
    is_alphanumeric = v.replace(' ', '').isalnum()
    assert is_alphanumeric, f'{info.field_name} must be alphanumeric'
    return v


def check_regex(v: str) -> str:
    assert v is not None, "Regex is null"
    assert isinstance(v, str), "Regex value must be a string"
    assert len(v) > 0, "Regex is empty"
    is_valid = None
    try:
        re.compile(v)
    except re.error as exc:
        is_valid = f"Invalid regex: {exc} (at position {exc.pos})"

    assert is_valid is None, is_valid
    return v
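A minimal sketch of the new check_regex validator in isolation (the exact error text comes from re.error and is shown only as an example):

check_regex(r"^/product/\d+$")        # returns the pattern unchanged
try:
    check_regex("([unclosed")
except AssertionError as e:
    print(e)                          # e.g. "Invalid regex: missing ), unterminated subpattern at position 0 (at position 0)"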
61
assist-server/build.sh
Normal file
@ -0,0 +1,61 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
|
||||
|
||||
ARCH=${ARCH:-amd64}
|
||||
git_sha=$(git rev-parse --short HEAD)
|
||||
image_tag=${IMAGE_TAG:-git_sha}
|
||||
check_prereq() {
|
||||
which docker || {
|
||||
echo "Docker not installed, please install docker."
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
source ../scripts/lib/_docker.sh
|
||||
|
||||
[[ $PATCH -eq 1 ]] && {
|
||||
image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
image_tag="${image_tag}-ee"
|
||||
}
|
||||
update_helm_release() {
|
||||
chart=$1
|
||||
HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
# Update the chart version
|
||||
sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Update image tags
|
||||
sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Commit the changes
|
||||
git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
git commit -m "chore(helm): Updating $chart image release"
|
||||
}
|
||||
|
||||
function build_api() {
|
||||
destination="_assist-server_ee"
|
||||
[[ -d ../${destination} ]] && {
|
||||
echo "Removing previous build cache"
|
||||
rm -rf ../${destination}
|
||||
}
|
||||
cp -R ../assist-server ../${destination}
|
||||
cd ../${destination} || exit 1
|
||||
cp -rf ../ee/assist-server/* ./
|
||||
|
||||
docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist-server:${image_tag} .
|
||||
|
||||
cd ../assist-server || exit 1
|
||||
rm -rf ../${destination}
|
||||
[[ $PUSH_IMAGE -eq 1 ]] && {
|
||||
docker push ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
|
||||
docker tag ${DOCKER_REPO:-'local'}/assist-server:${image_tag} ${DOCKER_REPO:-'local'}/assist-server:latest
|
||||
docker push ${DOCKER_REPO:-'local'}/assist-server:latest
|
||||
}
|
||||
[[ $SIGN_IMAGE -eq 1 ]] && {
|
||||
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
|
||||
}
|
||||
echo "build completed for assist-server"
|
||||
}
|
||||
|
||||
check_prereq
|
||||
build_api $1
|
||||
if [[ $PATCH -eq 1 ]]; then
|
||||
update_helm_release assist-server
|
||||
fi
|
||||
30
backend/Makefile
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
ee ?= "false" # true to build ee
|
||||
app ?= "" # app name, default all
|
||||
arch ?= "amd64" # default amd64
|
||||
docker_runtime ?= "docker" # default docker runtime
|
||||
|
||||
.PHONY: help
|
||||
help: ## Prints help for targets with comments
|
||||
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Docker
|
||||
|
||||
.PHONY: build
|
||||
build: ## Build the backend. ee=true for ee build. app=app name for only one app. Default build all apps.
|
||||
ARCH=$(arch) DOCKER_RUNTIME=$(docker_runtime) bash build.sh $(ee) $(app)
|
||||
|
||||
##@ Local Dev
|
||||
|
||||
.PHONY: scan
|
||||
scan: ## Scan the backend
|
||||
@trivy fs -q .
|
||||
|
||||
.PHONY: update
|
||||
update: ## Update the backend dependencies
|
||||
@echo Updating dependencies
|
||||
@go get -u -v ./...
|
||||
@go mod tidy
|
||||
|
||||
run: ## Run the backend. app=app name for app to run
|
||||
@if [ $(app) == "" ]; then echo "Error: app parameter is required. Usage: make run app=<app_name>"; exit 1; fi
|
||||
@go run "cmd/$(app)/main.go"
|
||||
|
|
@ -2,44 +2,71 @@ package main
|
|||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
|
||||
analyticsConfig "openreplay/backend/internal/config/analytics"
|
||||
"openreplay/backend/pkg/analytics"
|
||||
"openreplay/backend/pkg/db/postgres/pool"
|
||||
"openreplay/backend/pkg/logger"
|
||||
"openreplay/backend/pkg/metrics"
|
||||
"openreplay/backend/pkg/metrics/database"
|
||||
"openreplay/backend/pkg/metrics/web"
|
||||
"openreplay/backend/pkg/server"
|
||||
"openreplay/backend/pkg/server/api"
|
||||
)
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
log := logger.New()
|
||||
cfg := analyticsConfig.New(log)
|
||||
// Observability
|
||||
webMetrics := web.New("analytics")
|
||||
dbMetrics := database.New("analytics")
|
||||
metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
|
||||
log.Info(ctx, "Cacher service started")
|
||||
|
||||
pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "can't init postgres connection: %s", err)
|
||||
sigchan := make(chan os.Signal, 1)
|
||||
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
for {
|
||||
select {
|
||||
case sig := <-sigchan:
|
||||
log.Error(ctx, "Caught signal %v: terminating", sig)
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
defer pgConn.Close()
|
||||
|
||||
builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "can't init services: %s", err)
|
||||
}
|
||||
|
||||
router, err := api.NewRouter(&cfg.HTTP, log)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed while creating router: %s", err)
|
||||
}
|
||||
router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
|
||||
router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
|
||||
|
||||
server.Run(ctx, log, &cfg.HTTP, router)
|
||||
}
|
||||
|
||||
//
|
||||
//import (
|
||||
// "context"
|
||||
//
|
||||
// analyticsConfig "openreplay/backend/internal/config/analytics"
|
||||
// "openreplay/backend/pkg/analytics"
|
||||
// "openreplay/backend/pkg/db/postgres/pool"
|
||||
// "openreplay/backend/pkg/logger"
|
||||
// "openreplay/backend/pkg/metrics"
|
||||
// "openreplay/backend/pkg/metrics/database"
|
||||
// "openreplay/backend/pkg/metrics/web"
|
||||
// "openreplay/backend/pkg/server"
|
||||
// "openreplay/backend/pkg/server/api"
|
||||
//)
|
||||
//
|
||||
//func main() {
|
||||
// ctx := context.Background()
|
||||
// log := logger.New()
|
||||
// cfg := analyticsConfig.New(log)
|
||||
// // Observability
|
||||
// webMetrics := web.New("analytics")
|
||||
// dbMetrics := database.New("analytics")
|
||||
// metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
|
||||
//
|
||||
// pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
|
||||
// if err != nil {
|
||||
// log.Fatal(ctx, "can't init postgres connection: %s", err)
|
||||
// }
|
||||
// defer pgConn.Close()
|
||||
//
|
||||
// builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
|
||||
// if err != nil {
|
||||
// log.Fatal(ctx, "can't init services: %s", err)
|
||||
// }
|
||||
//
|
||||
// router, err := api.NewRouter(&cfg.HTTP, log)
|
||||
// if err != nil {
|
||||
// log.Fatal(ctx, "failed while creating router: %s", err)
|
||||
// }
|
||||
// router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
|
||||
// router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
|
||||
//
|
||||
// server.Run(ctx, log, &cfg.HTTP, router)
|
||||
//}
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ func main() {
|
|||
messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
|
||||
messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
|
||||
messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue,
|
||||
messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
|
||||
messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
|
||||
messages.MsgMouseClickDeprecated, messages.MsgSetPageLocation, messages.MsgSetPageLocationDeprecated,
|
||||
messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
|
||||
messages.MsgPageEvent, messages.MsgPageEventDeprecated, messages.MsgMouseThrashing, messages.MsgInputChange,
|
||||
|
|
|
|||
|
|
@ -100,6 +100,7 @@ func main() {
|
|||
// Process assets
|
||||
if msg.TypeID() == messages.MsgSetNodeAttributeURLBased ||
|
||||
msg.TypeID() == messages.MsgSetCSSDataURLBased ||
|
||||
msg.TypeID() == messages.MsgCSSInsertRuleURLBased ||
|
||||
msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased ||
|
||||
msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased {
|
||||
m := msg.Decode()
|
||||
|
|
|
|||
|
|
@ -1,52 +1,54 @@
|
|||
module openreplay/backend
|
||||
|
||||
go 1.23
|
||||
go 1.23.0
|
||||
|
||||
toolchain go1.23.1
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1
|
||||
github.com/Masterminds/semver v1.5.0
|
||||
github.com/andybalholm/brotli v1.1.1
|
||||
github.com/aws/aws-sdk-go v1.55.6
|
||||
github.com/btcsuite/btcutil v1.0.2
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0
|
||||
github.com/docker/distribution v2.8.3+incompatible
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0
|
||||
github.com/getsentry/sentry-go v0.31.1
|
||||
github.com/go-playground/validator/v10 v10.24.0
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0
|
||||
github.com/getsentry/sentry-go v0.32.0
|
||||
github.com/go-playground/validator/v10 v10.26.0
|
||||
github.com/go-redis/redis v6.15.9+incompatible
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/jackc/pgconn v1.14.3
|
||||
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438
|
||||
github.com/jackc/pgtype v1.14.4
|
||||
github.com/jackc/pgx/v4 v4.18.3
|
||||
github.com/klauspost/compress v1.17.11
|
||||
github.com/klauspost/compress v1.18.0
|
||||
github.com/klauspost/pgzip v1.2.6
|
||||
github.com/lib/pq v1.10.9
|
||||
github.com/oschwald/maxminddb-golang v1.13.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/prometheus/client_golang v1.20.5
|
||||
github.com/prometheus/client_golang v1.22.0
|
||||
github.com/rs/xid v1.6.0
|
||||
github.com/sethvargo/go-envconfig v1.1.0
|
||||
github.com/sethvargo/go-envconfig v1.2.0
|
||||
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/net v0.35.0
|
||||
golang.org/x/net v0.39.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||
github.com/ClickHouse/ch-go v0.65.0 // indirect
|
||||
github.com/DataDog/zstd v1.5.6 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/ClickHouse/ch-go v0.65.1 // indirect
|
||||
github.com/DataDog/zstd v1.5.7 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
|
|
@ -66,23 +68,23 @@ require (
|
|||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/prometheus/client_model v0.6.1 // indirect
|
||||
github.com/prometheus/common v0.62.0 // indirect
|
||||
github.com/prometheus/procfs v0.15.1 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.63.0 // indirect
|
||||
github.com/prometheus/procfs v0.16.0 // indirect
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
golang.org/x/crypto v0.33.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.22.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 // indirect
|
||||
google.golang.org/protobuf v1.36.4 // indirect
|
||||
golang.org/x/crypto v0.37.0 // indirect
|
||||
golang.org/x/oauth2 v0.29.0 // indirect
|
||||
golang.org/x/sys v0.32.0 // indirect
|
||||
golang.org/x/text v0.24.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
|
|
|||
|
|
@ -6,10 +6,17 @@ github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkk
|
|||
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0 h1:Bg8m3nq/X1DeePkAbCfb6ml6F3F0IunEhE8TMh+lY48=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 h1:UXT0o77lXQrikd1kgwIPQOUect7EoR/+sbP4wQKdzxM=
|
||||
|
|
@ -18,19 +25,28 @@ github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOEl
|
|||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
|
||||
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
|
||||
github.com/ClickHouse/ch-go v0.65.0 h1:vZAXfTQliuNNefqkPDewX3kgRxN6Q4vUENnnY+ynTRY=
|
||||
github.com/ClickHouse/ch-go v0.65.0/go.mod h1:tCM0XEH5oWngoi9Iu/8+tjPBo04I/FxNIffpdjtwx3k=
|
||||
github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU=
|
||||
github.com/ClickHouse/ch-go v0.65.1/go.mod h1:bsodgURwmrkvkBe5jw1qnGDgyITsYErfONKAHn05nv4=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.1 h1:Dy0n0l+cMbPXs8hFkeeWGaPKrB+MDByUNQBSmRO3W6k=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.1/go.mod h1:szk8BMoQV/NgHXZ20ZbwDyvPWmpfhRKjFkc6wzASGxM=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1 h1:RLhkxA6iH/bLTXeDtEj/u4yUx9Q03Y95P+cjHScQK78=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1/go.mod h1:YtaiIFlHCGNPbOpAvFGYobtcVnmgYvD/WmzitixxWYc=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0 h1:Y4rqkdrRHgExvC4o/NTbLdY5LFQ3LHS77/RNFxFX3Co=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0/go.mod h1:yioSINoRLVZkLyDzdMXPLRIqhDvel8iLBlwh6Iefso8=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0 h1:0VVmv8uZg8vdBuEpiF2nBGUezl2QITrxdEsLgh38j8M=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1 h1:weZhrGMO//sMEoSKWngoSQwMp4zBSlEX4p3/YWy9ltw=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
|
||||
github.com/DataDog/zstd v1.5.6 h1:LbEglqepa/ipmmQJUDnSsfvA8e8IStVcGaFWDuxvGOY=
|
||||
github.com/DataDog/zstd v1.5.6/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/DataDog/zstd v1.5.7 h1:ybO8RBeh29qrxIhCA9E8gKY6xfONU9T6G6aP9DTKfLE=
|
||||
github.com/DataDog/zstd v1.5.7/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
|
||||
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
|
||||
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
|
||||
|
|
@ -97,6 +113,8 @@ github.com/compose-spec/compose-go/v2 v2.1.3 h1:bD67uqLuL/XgkAK6ir3xZvNLFPxPScEi
|
|||
github.com/compose-spec/compose-go/v2 v2.1.3/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0 h1:0HlcSNWg4LpLA9nIjzUMIqWHI+w0S68UN7alXAc3TeA=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0 h1:TK5CH5RbIj/aVfmJFEsDUT6vD2izac2zmA5BUfAOxC0=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
|
||||
github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
|
||||
github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
|
||||
github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=
|
||||
|
|
@ -148,10 +166,14 @@ github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJ
|
|||
github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0 h1:Y2S/FBjx1LlCv5m6pWAF2kDJAHoSjSRSJCApolgfthA=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0 h1:OgTneVuXP2uip4BA658Xi6Hfw+PeIOod2rY3GVMGoVE=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10 h1:TCQ8i4PmIJuBunvBS6bwT2ybzVFxxUhhltAs3Gyu1yo=
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0 h1:e9cWksE/Fr7urDRmGPGp47Nsp4/mvNOrU8As1l2HQQ0=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0/go.mod h1:lGMlgKIbYoRvay3xWBeKahAiJOgmFDsjZC39nmO3H64=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0 h1:ANNq1h7DEiPUaALb8+5w3baQzaS08WfHV0DNzp0VG4M=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0/go.mod h1:WLqwXsJmQoYkoA9JBFeEwPkQhCfAZuUvfpdU/NvSSf0=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
|
|
@ -163,8 +185,12 @@ github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM4
|
|||
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
||||
github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4=
|
||||
github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
|
||||
github.com/getsentry/sentry-go v0.32.0 h1:YKs+//QmwE3DcYtfKRH8/KyOOF/I6Qnx7qYGNHCGmCY=
|
||||
github.com/getsentry/sentry-go v0.32.0/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
|
||||
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
|
|
@ -194,6 +220,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
|||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
|
||||
github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
|
||||
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
|
||||
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
|
||||
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
|
|
@ -211,6 +239,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
|||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
|
|
@ -222,6 +252,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
|||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
|
|
@ -328,6 +359,8 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6
|
|||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
|
||||
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -441,12 +474,20 @@ github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
|
||||
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
|
||||
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
|
||||
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
|
||||
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
|
||||
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
|
||||
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
|
||||
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
|
||||
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
|
||||
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
|
||||
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM=
|
||||
github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg=
|
||||
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o=
|
||||
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
@@ -468,6 +509,8 @@ github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQ
github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
|
||||
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
|
||||
github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/sethvargo/go-envconfig v1.2.0 h1:q3XkOZWkC+G1sMLCrw9oPGTjYexygLOXDmGUit1ti8Q=
|
||||
github.com/sethvargo/go-envconfig v1.2.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
|
||||
github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
|
||||
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
@@ -528,6 +571,8 @@ github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Q
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff h1:NwMEGwb7JJ8wPjT8OPKP5hO1Xz6AQ7Z00+GLSJfW21s=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc h1:reH9QQKGFOq39MYOvU9+SYrB8uzXtWNo51fWK3g0gGc=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc/go.mod h1:gwANdYmo9R8LLwGnyDFWK2PMsaXXX2HhAvCnb/UhZsM=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
@@ -557,6 +602,8 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
|
||||
go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
|
||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=
@@ -571,12 +618,16 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkE
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
|
||||
go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
|
||||
go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
|
||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
||||
go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
|
||||
go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
|
||||
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
|
||||
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
|
||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
||||
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
|
||||
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
|
||||
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -617,6 +668,10 @@ golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
|
||||
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
||||
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
|
||||
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
@@ -643,8 +698,14 @@ golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
|
||||
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
||||
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98=
|
||||
golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -679,6 +740,10 @@ golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
||||
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -700,6 +765,10 @@ golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
|
||||
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
|
||||
golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
|
||||
golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -727,12 +796,18 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 h1:J1H9f+LEdWAfHcez/4cvaVBox7cOYT+IU6rgqj5x++8=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287/go.mod h1:8BS3B93F/U1juMFq9+EDk+qOT5CO1R9IzXxG3PTqiRk=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
|
||||
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
|
||||
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y=
|
||||
gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -3,11 +3,10 @@ package datasaver
import (
"context"
"encoding/json"
"openreplay/backend/pkg/db/types"

"openreplay/backend/internal/config/db"
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/logger"
. "openreplay/backend/pkg/messages"
queue "openreplay/backend/pkg/queue/types"
@@ -2,7 +2,6 @@ package datasaver

import (
"context"

"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/messages"
@@ -133,6 +133,17 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message {
}
|
||||
newMsg.SetMeta(msg.Meta())
|
||||
return newMsg
|
||||
case *messages.CSSInsertRuleURLBased:
|
||||
if e.shouldSkipAsset(m.BaseURL) {
|
||||
return msg
|
||||
}
|
||||
newMsg := &messages.CSSInsertRule{
|
||||
ID: m.ID,
|
||||
Index: m.Index,
|
||||
Rule: e.handleCSS(m.SessionID(), m.BaseURL, m.Rule),
|
||||
}
|
||||
newMsg.SetMeta(msg.Meta())
|
||||
return newMsg
|
||||
case *messages.AdoptedSSReplaceURLBased:
|
||||
if e.shouldSkipAsset(m.BaseURL) {
|
||||
return msg
@@ -108,15 +108,15 @@ func (c *connectorImpl) newBatch(name, query string) error {
var batches = map[string]string{
|
||||
"sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?)",
|
||||
"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
|
||||
"pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
|
||||
"mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
|
||||
"mobile_custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
@@ -251,6 +251,7 @@ func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *m
"hesitation_time": nullableUint32(uint32(msg.HesitationTime)),
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal input event: %s", err)
@@ -267,6 +268,13 @@ func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *m
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
nullableUint16(uint16(msg.InputDuration)),
|
||||
jsonString,
|
||||
); err != nil {
@@ -291,6 +299,7 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
"url_hostpath": hostpath,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal issue event: %s", err)
@@ -307,6 +316,13 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
"mouse_thrashing",
|
||||
issueID,
|
||||
jsonString,
@@ -347,6 +363,7 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
"url_hostpath": hostpath,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal issue event: %s", err)
@@ -363,6 +380,13 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
msg.Type,
|
||||
issueID,
|
||||
jsonString,
@@ -436,6 +460,7 @@ func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messa
"load_event_time": loadEventTime,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal page event: %s", err)
@@ -452,6 +477,12 @@ func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messa
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.URL),
|
||||
jsonString,
|
||||
); err != nil {
@@ -496,6 +527,7 @@ func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *mess
"url_hostpath": hostpath,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal click event: %s", err)
@@ -512,6 +544,12 @@ func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *mess
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
jsonString,
|
||||
); err != nil {
@@ -535,6 +573,7 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
"message": msg.Message,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal error event: %s", err)
@@ -551,6 +590,13 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
msgID,
|
||||
jsonString,
|
||||
); err != nil {
@@ -585,6 +631,7 @@ func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session,
"max_used_js_heap_size": msg.MaxUsedJSHeapSize,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal performance event: %s", err)
@@ -601,6 +648,13 @@ func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session,
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
jsonString,
|
||||
); err != nil {
|
||||
c.checkError("performance", err)
@@ -636,6 +690,7 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N
"url_hostpath": hostpath,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal request event: %s", err)
@@ -652,6 +707,13 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.URL),
|
||||
nullableUint16(uint16(msg.Duration)),
|
||||
jsonString,
|
||||
); err != nil {
@@ -663,10 +725,10 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N

func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
|
||||
jsonString, err := json.Marshal(map[string]interface{}{
|
||||
"name": msg.Name,
|
||||
"payload": msg.Payload,
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal custom event: %s", err)
@@ -676,13 +738,20 @@ func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.Cu
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
getUUID(msg),
|
||||
"CUSTOM",
|
||||
msg.Name,
|
||||
eventTime,
|
||||
eventTime.Unix(),
|
||||
session.UserUUID,
|
||||
true,
|
||||
false,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
jsonString,
|
||||
); err != nil {
|
||||
c.checkError("custom", err)
@@ -698,6 +767,7 @@ func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.G
"response_body": nullableString(msg.Response),
|
||||
"user_device": session.UserDevice,
|
||||
"user_device_type": session.UserDeviceType,
|
||||
"page_title ": msg.PageTitle,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't marshal graphql event: %s", err)
@@ -714,6 +784,13 @@ func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.G
true,
|
||||
session.Platform,
|
||||
session.UserOSVersion,
|
||||
session.UserOS,
|
||||
session.UserBrowser,
|
||||
session.Referrer,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
cropString(msg.Url),
|
||||
jsonString,
|
||||
); err != nil {
|
||||
c.checkError("graphql", err)
@@ -84,7 +84,10 @@ func (p *poolImpl) Begin() (*Tx, error) {
tx, err := p.conn.Begin(context.Background())
|
||||
p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "")
|
||||
p.metrics.IncreaseTotalRequests("begin", "")
|
||||
return &Tx{tx, p.metrics}, err
|
||||
return &Tx{
|
||||
origTx: tx,
|
||||
metrics: p.metrics,
|
||||
}, err
|
||||
}
|
||||
|
||||
func (p *poolImpl) Close() {
@@ -94,13 +97,13 @@ func (p *poolImpl) Close() {
// TX - start
|
||||
|
||||
type Tx struct {
|
||||
pgx.Tx
|
||||
origTx pgx.Tx
|
||||
metrics database.Database
|
||||
}
|
||||
|
||||
func (tx *Tx) TxExec(sql string, args ...interface{}) error {
|
||||
start := time.Now()
|
||||
_, err := tx.Exec(context.Background(), sql, args...)
|
||||
_, err := tx.origTx.Exec(context.Background(), sql, args...)
|
||||
method, table := methodName(sql)
|
||||
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
|
||||
tx.metrics.IncreaseTotalRequests(method, table)
@@ -109,7 +112,7 @@ func (tx *Tx) TxExec(sql string, args ...interface{}) error {

func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
|
||||
start := time.Now()
|
||||
res := tx.QueryRow(context.Background(), sql, args...)
|
||||
res := tx.origTx.QueryRow(context.Background(), sql, args...)
|
||||
method, table := methodName(sql)
|
||||
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
|
||||
tx.metrics.IncreaseTotalRequests(method, table)
@@ -118,7 +121,7 @@ func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {

func (tx *Tx) TxRollback() error {
|
||||
start := time.Now()
|
||||
err := tx.Rollback(context.Background())
|
||||
err := tx.origTx.Rollback(context.Background())
|
||||
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "")
|
||||
tx.metrics.IncreaseTotalRequests("rollback", "")
|
||||
return err
@@ -126,7 +129,7 @@ func (tx *Tx) TxRollback() error {

func (tx *Tx) TxCommit() error {
|
||||
start := time.Now()
|
||||
err := tx.Commit(context.Background())
|
||||
err := tx.origTx.Commit(context.Background())
|
||||
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "")
|
||||
tx.metrics.IncreaseTotalRequests("commit", "")
|
||||
return err
@@ -5,10 +5,11 @@ import (
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/google/uuid"
|
||||
"hash/fnv"
|
||||
"strconv"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
. "openreplay/backend/pkg/messages"
|
||||
)
@@ -23,41 +24,8 @@ type ErrorEvent struct {
Payload string
|
||||
Tags map[string]*string
|
||||
OriginType int
|
||||
}
|
||||
|
||||
func unquote(s string) string {
|
||||
if s[0] == '"' {
|
||||
return s[1 : len(s)-1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
func parseTags(tagsJSON string) (tags map[string]*string, err error) {
|
||||
if len(tagsJSON) == 0 {
|
||||
return nil, fmt.Errorf("empty tags")
|
||||
}
|
||||
if tagsJSON[0] == '[' {
|
||||
var tagsArr []json.RawMessage
|
||||
if err = json.Unmarshal([]byte(tagsJSON), &tagsArr); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
tags = make(map[string]*string)
|
||||
for _, keyBts := range tagsArr {
|
||||
tags[unquote(string(keyBts))] = nil
|
||||
}
|
||||
} else if tagsJSON[0] == '{' {
|
||||
var tagsObj map[string]json.RawMessage
|
||||
if err = json.Unmarshal([]byte(tagsJSON), &tagsObj); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
tags = make(map[string]*string)
|
||||
for key, valBts := range tagsObj {
|
||||
val := unquote(string(valBts))
|
||||
tags[key] = &val
|
||||
}
|
||||
}
|
||||
return
|
||||
Url string
|
||||
PageTitle string
|
||||
}
|
||||
|
||||
func WrapJSException(m *JSException) (*ErrorEvent, error) {
@@ -69,6 +37,8 @@ func WrapJSException(m *JSException) (*ErrorEvent, error) {
Message: m.Message,
|
||||
Payload: m.Payload,
|
||||
OriginType: m.TypeID(),
|
||||
Url: m.Url,
|
||||
PageTitle: m.PageTitle,
|
||||
}, nil
|
||||
}
|
||||
@@ -81,6 +51,8 @@ func WrapIntegrationEvent(m *IntegrationEvent) *ErrorEvent {
Message: m.Message,
|
||||
Payload: m.Payload,
|
||||
OriginType: m.TypeID(),
|
||||
Url: m.Url,
|
||||
PageTitle: m.PageTitle,
|
||||
}
|
||||
}
@@ -77,6 +77,8 @@ func (d *DeadClickDetector) Handle(message Message, timestamp uint64) Message {
*MoveNode,
|
||||
*RemoveNode,
|
||||
*SetCSSData,
|
||||
*CSSInsertRule,
|
||||
*CSSDeleteRule,
|
||||
*SetInputValue,
|
||||
*SetInputChecked:
|
||||
return d.Build()
@@ -2,7 +2,7 @@
package messages
|
||||
|
||||
func IsReplayerType(id int) bool {
|
||||
return 1 != id && 17 != id && 23 != id && 24 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 42 != id && 56 != id && 63 != id && 64 != id && 66 != id && 78 != id && 81 != id && 82 != id && 112 != id && 115 != id && 124 != id && 125 != id && 126 != id && 127 != id && 90 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 107 != id && 110 != id
|
||||
return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 42 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 112 != id && 115 != id && 124 != id && 125 != id && 126 != id && 127 != id && 90 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 107 != id && 110 != id
|
||||
}
|
||||
|
||||
func IsMobileType(id int) bool {
@@ -10,5 +10,5 @@ func IsMobileType(id int) bool {
}
|
||||
|
||||
func IsDOMType(id int) bool {
|
||||
return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 34 == id || 35 == id || 49 == id || 50 == id || 51 == id || 43 == id || 52 == id || 54 == id || 55 == id || 57 == id || 58 == id || 60 == id || 61 == id || 68 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 113 == id || 114 == id || 117 == id || 118 == id || 119 == id || 122 == id || 93 == id || 96 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 106 == id || 111 == id
|
||||
return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 34 == id || 35 == id || 37 == id || 38 == id || 49 == id || 50 == id || 51 == id || 43 == id || 52 == id || 54 == id || 55 == id || 57 == id || 58 == id || 59 == id || 60 == id || 61 == id || 67 == id || 68 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 113 == id || 114 == id || 117 == id || 118 == id || 119 == id || 122 == id || 93 == id || 96 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 106 == id || 111 == id
|
||||
}
@@ -44,8 +44,9 @@ func NewMessageIterator(log logger.Logger, messageHandler MessageHandler, messag
iter.filter = filter
|
||||
}
|
||||
iter.preFilter = map[int]struct{}{
|
||||
MsgBatchMetadata: {}, MsgTimestamp: {}, MsgSessionStart: {},
|
||||
MsgSessionEnd: {}, MsgSetPageLocation: {}, MsgMobileBatchMeta: {},
|
||||
MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {},
|
||||
MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {},
|
||||
MsgMobileBatchMeta: {},
|
||||
}
|
||||
return iter
|
||||
}
@@ -151,6 +152,20 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error {
i.version = m.Version
|
||||
i.batchInfo.version = m.Version
|
||||
|
||||
case *BatchMeta: // Is not required to be present in batch since Mobile doesn't have it (though we might change it)
|
||||
if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though
|
||||
return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info())
|
||||
}
|
||||
i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
|
||||
i.messageInfo.Timestamp = uint64(m.Timestamp)
|
||||
if m.Timestamp == 0 {
|
||||
i.zeroTsLog("BatchMeta")
|
||||
}
|
||||
// Try to get saved session's page url
|
||||
if savedURL := i.urls.Get(i.messageInfo.batch.sessionID); savedURL != "" {
|
||||
i.messageInfo.Url = savedURL
|
||||
}
|
||||
|
||||
case *Timestamp:
|
||||
i.messageInfo.Timestamp = m.Timestamp
|
||||
if m.Timestamp == 0 {
@@ -176,6 +191,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error {

case *SetPageLocation:
|
||||
i.messageInfo.Url = m.URL
|
||||
i.messageInfo.PageTitle = m.DocumentTitle
|
||||
// Save session page url in cache for using in next batches
|
||||
i.urls.Set(i.messageInfo.batch.sessionID, m.URL)
@@ -2,6 +2,34 @@ package messages

func transformDeprecated(msg Message) Message {
|
||||
switch m := msg.(type) {
|
||||
case *JSExceptionDeprecated:
|
||||
return &JSException{
|
||||
Name: m.Name,
|
||||
Message: m.Message,
|
||||
Payload: m.Payload,
|
||||
Metadata: "{}",
|
||||
}
|
||||
case *Fetch:
|
||||
return &NetworkRequest{
|
||||
Type: "fetch",
|
||||
Method: m.Method,
|
||||
URL: m.URL,
|
||||
Request: m.Request,
|
||||
Response: m.Response,
|
||||
Status: m.Status,
|
||||
Timestamp: m.Timestamp,
|
||||
Duration: m.Duration,
|
||||
}
|
||||
case *IssueEventDeprecated:
|
||||
return &IssueEvent{
|
||||
MessageID: m.MessageID,
|
||||
Timestamp: m.Timestamp,
|
||||
Type: m.Type,
|
||||
ContextString: m.ContextString,
|
||||
Context: m.Context,
|
||||
Payload: m.Payload,
|
||||
URL: "",
|
||||
}
|
||||
case *ResourceTimingDeprecated:
|
||||
return &ResourceTiming{
|
||||
Timestamp: m.Timestamp,
@@ -54,6 +54,7 @@ type message struct {
Timestamp uint64
|
||||
Index uint64
|
||||
Url string
|
||||
PageTitle string
|
||||
batch *BatchInfo
|
||||
}
@@ -70,6 +71,7 @@ func (m *message) SetMeta(origin *message) {
m.Timestamp = origin.Timestamp
|
||||
m.Index = origin.Index
|
||||
m.Url = origin.Url
|
||||
m.PageTitle = origin.PageTitle
|
||||
}
|
||||
|
||||
func (m *message) SessionID() uint64 {
|
||||
@@ -2,114 +2,125 @@
package messages
|
||||
|
||||
const (
|
||||
MsgTimestamp = 0
|
||||
MsgSessionStart = 1
|
||||
MsgSetPageLocationDeprecated = 4
|
||||
MsgSetViewportSize = 5
|
||||
MsgSetViewportScroll = 6
|
||||
MsgCreateDocument = 7
|
||||
MsgCreateElementNode = 8
|
||||
MsgCreateTextNode = 9
|
||||
MsgMoveNode = 10
|
||||
MsgRemoveNode = 11
|
||||
MsgSetNodeAttribute = 12
|
||||
MsgRemoveNodeAttribute = 13
|
||||
MsgSetNodeData = 14
|
||||
MsgSetCSSData = 15
|
||||
MsgSetNodeScroll = 16
|
||||
MsgSetInputTarget = 17
|
||||
MsgSetInputValue = 18
|
||||
MsgSetInputChecked = 19
|
||||
MsgMouseMove = 20
|
||||
MsgNetworkRequestDeprecated = 21
|
||||
MsgConsoleLog = 22
|
||||
MsgPageLoadTiming = 23
|
||||
MsgPageRenderTiming = 24
|
||||
MsgIntegrationEvent = 26
|
||||
MsgCustomEvent = 27
|
||||
MsgUserID = 28
|
||||
MsgUserAnonymousID = 29
|
||||
MsgMetadata = 30
|
||||
MsgPageEventDeprecated = 31
|
||||
MsgInputEvent = 32
|
||||
MsgPageEvent = 33
|
||||
MsgStringDictGlobal = 34
|
||||
MsgSetNodeAttributeDictGlobal = 35
|
||||
MsgProfiler = 40
|
||||
MsgOTable = 41
|
||||
MsgStateAction = 42
|
||||
MsgReduxDeprecated = 44
|
||||
MsgVuex = 45
|
||||
MsgMobX = 46
|
||||
MsgNgRx = 47
|
||||
MsgGraphQLDeprecated = 48
|
||||
MsgPerformanceTrack = 49
|
||||
MsgStringDictDeprecated = 50
|
||||
MsgSetNodeAttributeDictDeprecated = 51
|
||||
MsgStringDict = 43
|
||||
MsgSetNodeAttributeDict = 52
|
||||
MsgResourceTimingDeprecated = 53
|
||||
MsgConnectionInformation = 54
|
||||
MsgSetPageVisibility = 55
|
||||
MsgPerformanceTrackAggr = 56
|
||||
MsgLoadFontFace = 57
|
||||
MsgSetNodeFocus = 58
|
||||
MsgSetNodeAttributeURLBased = 60
|
||||
MsgSetCSSDataURLBased = 61
|
||||
MsgTechnicalInfo = 63
|
||||
MsgCustomIssue = 64
|
||||
MsgAssetCache = 66
|
||||
MsgMouseClick = 68
|
||||
MsgMouseClickDeprecated = 69
|
||||
MsgCreateIFrameDocument = 70
|
||||
MsgAdoptedSSReplaceURLBased = 71
|
||||
MsgAdoptedSSReplace = 72
|
||||
MsgAdoptedSSInsertRuleURLBased = 73
|
||||
MsgAdoptedSSInsertRule = 74
|
||||
MsgAdoptedSSDeleteRule = 75
|
||||
MsgAdoptedSSAddOwner = 76
|
||||
MsgAdoptedSSRemoveOwner = 77
|
||||
MsgJSException = 78
|
||||
MsgZustand = 79
|
||||
MsgBatchMetadata = 81
|
||||
MsgPartitionedMessage = 82
|
||||
MsgNetworkRequest = 83
|
||||
MsgWSChannel = 84
|
||||
MsgInputChange = 112
|
||||
MsgSelectionChange = 113
|
||||
MsgMouseThrashing = 114
|
||||
MsgUnbindNodes = 115
|
||||
MsgResourceTiming = 116
|
||||
MsgTabChange = 117
|
||||
MsgTabData = 118
|
||||
MsgCanvasNode = 119
|
||||
MsgTagTrigger = 120
|
||||
MsgRedux = 121
|
||||
MsgSetPageLocation = 122
|
||||
MsgGraphQL = 123
|
||||
MsgWebVitals = 124
|
||||
MsgIssueEvent = 125
|
||||
MsgSessionEnd = 126
|
||||
MsgSessionSearch = 127
|
||||
MsgMobileSessionStart = 90
|
||||
MsgMobileSessionEnd = 91
|
||||
MsgMobileMetadata = 92
|
||||
MsgMobileEvent = 93
|
||||
MsgMobileUserID = 94
|
||||
MsgMobileUserAnonymousID = 95
|
||||
MsgMobileScreenChanges = 96
|
||||
MsgMobileCrash = 97
|
||||
MsgMobileViewComponentEvent = 98
|
||||
MsgMobileClickEvent = 100
|
||||
MsgMobileInputEvent = 101
|
||||
MsgMobilePerformanceEvent = 102
|
||||
MsgMobileLog = 103
|
||||
MsgMobileInternalError = 104
|
||||
MsgMobileNetworkCall = 105
|
||||
MsgMobileSwipeEvent = 106
|
||||
MsgMobileBatchMeta = 107
|
||||
MsgMobilePerformanceAggregated = 110
|
||||
MsgMobileIssueEvent = 111
|
||||
MsgTimestamp = 0
|
||||
MsgSessionStart = 1
|
||||
MsgSessionEndDeprecated = 3
|
||||
MsgSetPageLocationDeprecated = 4
|
||||
MsgSetViewportSize = 5
|
||||
MsgSetViewportScroll = 6
|
||||
MsgCreateDocument = 7
|
||||
MsgCreateElementNode = 8
|
||||
MsgCreateTextNode = 9
|
||||
MsgMoveNode = 10
|
||||
MsgRemoveNode = 11
|
||||
MsgSetNodeAttribute = 12
|
||||
MsgRemoveNodeAttribute = 13
|
||||
MsgSetNodeData = 14
|
||||
MsgSetCSSData = 15
|
||||
MsgSetNodeScroll = 16
|
||||
MsgSetInputTarget = 17
|
||||
MsgSetInputValue = 18
|
||||
MsgSetInputChecked = 19
|
||||
MsgMouseMove = 20
|
||||
MsgNetworkRequestDeprecated = 21
|
||||
MsgConsoleLog = 22
|
||||
MsgPageLoadTiming = 23
|
||||
MsgPageRenderTiming = 24
|
||||
MsgJSExceptionDeprecated = 25
|
||||
MsgIntegrationEvent = 26
|
||||
MsgCustomEvent = 27
|
||||
MsgUserID = 28
|
||||
MsgUserAnonymousID = 29
|
||||
MsgMetadata = 30
|
||||
MsgPageEventDeprecated = 31
|
||||
MsgInputEvent = 32
|
||||
MsgPageEvent = 33
|
||||
MsgStringDictGlobal = 34
|
||||
MsgSetNodeAttributeDictGlobal = 35
|
||||
MsgCSSInsertRule = 37
|
||||
MsgCSSDeleteRule = 38
|
||||
MsgFetch = 39
|
||||
MsgProfiler = 40
|
||||
MsgOTable = 41
|
||||
MsgStateAction = 42
|
||||
MsgReduxDeprecated = 44
|
||||
MsgVuex = 45
|
||||
MsgMobX = 46
|
||||
MsgNgRx = 47
|
||||
MsgGraphQLDeprecated = 48
|
||||
MsgPerformanceTrack = 49
|
||||
MsgStringDictDeprecated = 50
|
||||
MsgSetNodeAttributeDictDeprecated = 51
|
||||
MsgStringDict = 43
|
||||
MsgSetNodeAttributeDict = 52
|
||||
MsgResourceTimingDeprecatedDeprecated = 53
|
||||
MsgConnectionInformation = 54
|
||||
MsgSetPageVisibility = 55
|
||||
MsgPerformanceTrackAggr = 56
|
||||
MsgLoadFontFace = 57
|
||||
MsgSetNodeFocus = 58
|
||||
MsgLongTask = 59
|
||||
MsgSetNodeAttributeURLBased = 60
|
||||
MsgSetCSSDataURLBased = 61
|
||||
MsgIssueEventDeprecated = 62
|
||||
MsgTechnicalInfo = 63
|
||||
MsgCustomIssue = 64
|
||||
MsgAssetCache = 66
|
||||
MsgCSSInsertRuleURLBased = 67
|
||||
MsgMouseClick = 68
|
||||
MsgMouseClickDeprecated = 69
|
||||
MsgCreateIFrameDocument = 70
|
||||
MsgAdoptedSSReplaceURLBased = 71
|
||||
MsgAdoptedSSReplace = 72
|
||||
MsgAdoptedSSInsertRuleURLBased = 73
|
||||
MsgAdoptedSSInsertRule = 74
|
||||
MsgAdoptedSSDeleteRule = 75
|
||||
MsgAdoptedSSAddOwner = 76
|
||||
MsgAdoptedSSRemoveOwner = 77
|
||||
MsgJSException = 78
|
||||
MsgZustand = 79
|
||||
MsgBatchMeta = 80
|
||||
MsgBatchMetadata = 81
|
||||
MsgPartitionedMessage = 82
|
||||
MsgNetworkRequest = 83
|
||||
MsgWSChannel = 84
|
||||
MsgResourceTiming = 85
|
||||
MsgLongAnimationTask = 89
|
||||
MsgInputChange = 112
|
||||
MsgSelectionChange = 113
|
||||
MsgMouseThrashing = 114
|
||||
MsgUnbindNodes = 115
|
||||
MsgResourceTimingDeprecated = 116
|
||||
MsgTabChange = 117
|
||||
MsgTabData = 118
|
||||
MsgCanvasNode = 119
|
||||
MsgTagTrigger = 120
|
||||
MsgRedux = 121
|
||||
MsgSetPageLocation = 122
|
||||
MsgGraphQL = 123
|
||||
MsgWebVitals = 124
|
||||
MsgIssueEvent = 125
|
||||
MsgSessionEnd = 126
|
||||
MsgSessionSearch = 127
|
||||
MsgMobileSessionStart = 90
|
||||
MsgMobileSessionEnd = 91
|
||||
MsgMobileMetadata = 92
|
||||
MsgMobileEvent = 93
|
||||
MsgMobileUserID = 94
|
||||
MsgMobileUserAnonymousID = 95
|
||||
MsgMobileScreenChanges = 96
|
||||
MsgMobileCrash = 97
|
||||
MsgMobileViewComponentEvent = 98
|
||||
MsgMobileClickEvent = 100
|
||||
MsgMobileInputEvent = 101
|
||||
MsgMobilePerformanceEvent = 102
|
||||
MsgMobileLog = 103
|
||||
MsgMobileInternalError = 104
|
||||
MsgMobileNetworkCall = 105
|
||||
MsgMobileSwipeEvent = 106
|
||||
MsgMobileBatchMeta = 107
|
||||
MsgMobilePerformanceAggregated = 110
|
||||
MsgMobileIssueEvent = 111
|
||||
)
|
||||
|
||||
type Timestamp struct {
@@ -184,6 +195,27 @@ func (msg *SessionStart) TypeID() int {
return 1
|
||||
}
|
||||
|
||||
type SessionEndDeprecated struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
}
|
||||
|
||||
func (msg *SessionEndDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 11)
|
||||
buf[0] = 3
|
||||
p := 1
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *SessionEndDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *SessionEndDeprecated) TypeID() int {
|
||||
return 3
|
||||
}
|
||||
|
||||
type SetPageLocationDeprecated struct {
|
||||
message
|
||||
URL string
@@ -708,6 +740,31 @@ func (msg *PageRenderTiming) TypeID() int {
return 24
|
||||
}
|
||||
|
||||
type JSExceptionDeprecated struct {
|
||||
message
|
||||
Name string
|
||||
Message string
|
||||
Payload string
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 31+len(msg.Name)+len(msg.Message)+len(msg.Payload))
|
||||
buf[0] = 25
|
||||
p := 1
|
||||
p = WriteString(msg.Name, buf, p)
|
||||
p = WriteString(msg.Message, buf, p)
|
||||
p = WriteString(msg.Payload, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) TypeID() int {
|
||||
return 25
|
||||
}
|
||||
|
||||
type IntegrationEvent struct {
|
||||
message
|
||||
Timestamp uint64
@@ -1010,6 +1067,87 @@ func (msg *SetNodeAttributeDictGlobal) TypeID() int {
return 35
|
||||
}
|
||||
|
||||
type CSSInsertRule struct {
|
||||
message
|
||||
ID uint64
|
||||
Rule string
|
||||
Index uint64
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) Encode() []byte {
|
||||
buf := make([]byte, 31+len(msg.Rule))
|
||||
buf[0] = 37
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteString(msg.Rule, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) TypeID() int {
|
||||
return 37
|
||||
}
|
||||
|
||||
type CSSDeleteRule struct {
|
||||
message
|
||||
ID uint64
|
||||
Index uint64
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) Encode() []byte {
|
||||
buf := make([]byte, 21)
|
||||
buf[0] = 38
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) TypeID() int {
|
||||
return 38
|
||||
}
|
||||
|
||||
type Fetch struct {
|
||||
message
|
||||
Method string
|
||||
URL string
|
||||
Request string
|
||||
Response string
|
||||
Status uint64
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
}
|
||||
|
||||
func (msg *Fetch) Encode() []byte {
|
||||
buf := make([]byte, 71+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response))
|
||||
buf[0] = 39
|
||||
p := 1
|
||||
p = WriteString(msg.Method, buf, p)
|
||||
p = WriteString(msg.URL, buf, p)
|
||||
p = WriteString(msg.Request, buf, p)
|
||||
p = WriteString(msg.Response, buf, p)
|
||||
p = WriteUint(msg.Status, buf, p)
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteUint(msg.Duration, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *Fetch) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *Fetch) TypeID() int {
|
||||
return 39
|
||||
}
|
||||
|
||||
type Profiler struct {
|
||||
message
|
||||
Name string
@@ -1329,7 +1467,7 @@ func (msg *SetNodeAttributeDict) TypeID() int {
return 52
|
||||
}
|
||||
|
||||
type ResourceTimingDeprecated struct {
|
||||
type ResourceTimingDeprecatedDeprecated struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
Duration uint64
@@ -1341,7 +1479,7 @@ type ResourceTimingDeprecated struct {
Initiator string
|
||||
}
|
||||
|
||||
func (msg *ResourceTimingDeprecated) Encode() []byte {
|
||||
func (msg *ResourceTimingDeprecatedDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 81+len(msg.URL)+len(msg.Initiator))
|
||||
buf[0] = 53
|
||||
p := 1
@@ -1356,11 +1494,11 @@ func (msg *ResourceTimingDeprecated) Encode() []byte {
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *ResourceTimingDeprecated) Decode() Message {
|
||||
func (msg *ResourceTimingDeprecatedDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *ResourceTimingDeprecated) TypeID() int {
|
||||
func (msg *ResourceTimingDeprecatedDeprecated) TypeID() int {
|
||||
return 53
|
||||
}
|
||||
@@ -1503,6 +1641,39 @@ func (msg *SetNodeFocus) TypeID() int {
return 58
|
||||
}
|
||||
|
||||
type LongTask struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
Context uint64
|
||||
ContainerType uint64
|
||||
ContainerSrc string
|
||||
ContainerId string
|
||||
ContainerName string
|
||||
}
|
||||
|
||||
func (msg *LongTask) Encode() []byte {
|
||||
buf := make([]byte, 71+len(msg.ContainerSrc)+len(msg.ContainerId)+len(msg.ContainerName))
|
||||
buf[0] = 59
|
||||
p := 1
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteUint(msg.Duration, buf, p)
|
||||
p = WriteUint(msg.Context, buf, p)
|
||||
p = WriteUint(msg.ContainerType, buf, p)
|
||||
p = WriteString(msg.ContainerSrc, buf, p)
|
||||
p = WriteString(msg.ContainerId, buf, p)
|
||||
p = WriteString(msg.ContainerName, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *LongTask) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *LongTask) TypeID() int {
|
||||
return 59
|
||||
}
|
||||
|
||||
type SetNodeAttributeURLBased struct {
|
||||
message
|
||||
ID uint64
@@ -1555,6 +1726,37 @@ func (msg *SetCSSDataURLBased) TypeID() int {
return 61
|
||||
}
|
||||
|
||||
type IssueEventDeprecated struct {
|
||||
message
|
||||
MessageID uint64
|
||||
Timestamp uint64
|
||||
Type string
|
||||
ContextString string
|
||||
Context string
|
||||
Payload string
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 61+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload))
|
||||
buf[0] = 62
|
||||
p := 1
|
||||
p = WriteUint(msg.MessageID, buf, p)
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteString(msg.Type, buf, p)
|
||||
p = WriteString(msg.ContextString, buf, p)
|
||||
p = WriteString(msg.Context, buf, p)
|
||||
p = WriteString(msg.Payload, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) TypeID() int {
|
||||
return 62
|
||||
}
|
||||
|
||||
type TechnicalInfo struct {
|
||||
message
|
||||
Type string
@@ -1622,6 +1824,33 @@ func (msg *AssetCache) TypeID() int {
return 66
|
||||
}
|
||||
|
||||
type CSSInsertRuleURLBased struct {
|
||||
message
|
||||
ID uint64
|
||||
Rule string
|
||||
Index uint64
|
||||
BaseURL string
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) Encode() []byte {
|
||||
buf := make([]byte, 41+len(msg.Rule)+len(msg.BaseURL))
|
||||
buf[0] = 67
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteString(msg.Rule, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
p = WriteString(msg.BaseURL, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) TypeID() int {
|
||||
return 67
|
||||
}
|
||||
|
||||
type MouseClick struct {
|
||||
message
|
||||
ID uint64
@@ -1922,6 +2151,31 @@ func (msg *Zustand) TypeID() int {
return 79
|
||||
}
|
||||
|
||||
type BatchMeta struct {
|
||||
message
|
||||
PageNo uint64
|
||||
FirstIndex uint64
|
||||
Timestamp int64
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) Encode() []byte {
|
||||
buf := make([]byte, 31)
|
||||
buf[0] = 80
|
||||
p := 1
|
||||
p = WriteUint(msg.PageNo, buf, p)
|
||||
p = WriteUint(msg.FirstIndex, buf, p)
|
||||
p = WriteInt(msg.Timestamp, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) TypeID() int {
|
||||
return 80
|
||||
}
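As a usage sketch, producing the wire bytes for the small BatchMeta message with the Encode method above looks like the following; BatchMeta and Encode come from this diff, while feeding the bytes back through DecodeBatchMeta needs a BytesReader, whose construction is not shown here and is therefore left out.

// Hypothetical caller of the BatchMeta codec introduced above.
func encodeBatchMetaExample() []byte {
	meta := &BatchMeta{PageNo: 1, FirstIndex: 42, Timestamp: 1700000000000}
	data := meta.Encode()
	// data[0] == 80 (the BatchMeta type ID); the varint-encoded PageNo,
	// FirstIndex and Timestamp follow, and the slice is already trimmed.
	return data
}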
|
||||
|
||||
type BatchMetadata struct {
|
||||
message
|
||||
Version uint64
|
||||
|
|
@ -2042,6 +2296,90 @@ func (msg *WSChannel) TypeID() int {
|
|||
return 84
|
||||
}
|
||||
|
||||
type ResourceTiming struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
TTFB uint64
|
||||
HeaderSize uint64
|
||||
EncodedBodySize uint64
|
||||
DecodedBodySize uint64
|
||||
URL string
|
||||
Initiator string
|
||||
TransferredSize uint64
|
||||
Cached bool
|
||||
Queueing uint64
|
||||
DnsLookup uint64
|
||||
InitialConnection uint64
|
||||
SSL uint64
|
||||
ContentDownload uint64
|
||||
Total uint64
|
||||
Stalled uint64
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) Encode() []byte {
|
||||
buf := make([]byte, 171+len(msg.URL)+len(msg.Initiator))
|
||||
buf[0] = 85
|
||||
p := 1
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteUint(msg.Duration, buf, p)
|
||||
p = WriteUint(msg.TTFB, buf, p)
|
||||
p = WriteUint(msg.HeaderSize, buf, p)
|
||||
p = WriteUint(msg.EncodedBodySize, buf, p)
|
||||
p = WriteUint(msg.DecodedBodySize, buf, p)
|
||||
p = WriteString(msg.URL, buf, p)
|
||||
p = WriteString(msg.Initiator, buf, p)
|
||||
p = WriteUint(msg.TransferredSize, buf, p)
|
||||
p = WriteBoolean(msg.Cached, buf, p)
|
||||
p = WriteUint(msg.Queueing, buf, p)
|
||||
p = WriteUint(msg.DnsLookup, buf, p)
|
||||
p = WriteUint(msg.InitialConnection, buf, p)
|
||||
p = WriteUint(msg.SSL, buf, p)
|
||||
p = WriteUint(msg.ContentDownload, buf, p)
|
||||
p = WriteUint(msg.Total, buf, p)
|
||||
p = WriteUint(msg.Stalled, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) TypeID() int {
|
||||
return 85
|
||||
}
|
||||
|
||||
type LongAnimationTask struct {
|
||||
message
|
||||
Name string
|
||||
Duration int64
|
||||
BlockingDuration int64
|
||||
FirstUIEventTimestamp int64
|
||||
StartTime int64
|
||||
Scripts string
|
||||
}
|
||||
|
||||
func (msg *LongAnimationTask) Encode() []byte {
|
||||
buf := make([]byte, 61+len(msg.Name)+len(msg.Scripts))
|
||||
buf[0] = 89
|
||||
p := 1
|
||||
p = WriteString(msg.Name, buf, p)
|
||||
p = WriteInt(msg.Duration, buf, p)
|
||||
p = WriteInt(msg.BlockingDuration, buf, p)
|
||||
p = WriteInt(msg.FirstUIEventTimestamp, buf, p)
|
||||
p = WriteInt(msg.StartTime, buf, p)
|
||||
p = WriteString(msg.Scripts, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *LongAnimationTask) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *LongAnimationTask) TypeID() int {
|
||||
return 89
|
||||
}
|
||||
|
||||
type InputChange struct {
|
||||
message
|
||||
ID uint64
|
||||
|
|
@ -2140,7 +2478,7 @@ func (msg *UnbindNodes) TypeID() int {
|
|||
return 115
|
||||
}
|
||||
|
||||
type ResourceTiming struct {
|
||||
type ResourceTimingDeprecated struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
|
|
@ -2154,7 +2492,7 @@ type ResourceTiming struct {
|
|||
Cached bool
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) Encode() []byte {
|
||||
func (msg *ResourceTimingDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 101+len(msg.URL)+len(msg.Initiator))
|
||||
buf[0] = 116
|
||||
p := 1
|
||||
|
|
@ -2171,11 +2509,11 @@ func (msg *ResourceTiming) Encode() []byte {
|
|||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) Decode() Message {
|
||||
func (msg *ResourceTimingDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *ResourceTiming) TypeID() int {
|
||||
func (msg *ResourceTimingDeprecated) TypeID() int {
|
||||
return 116
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -68,6 +68,15 @@ func DecodeSessionStart(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeSessionEndDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SessionEndDeprecated{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeSetPageLocationDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SetPageLocationDeprecated{}
|
||||
|
|
@ -381,6 +390,21 @@ func DecodePageRenderTiming(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeJSExceptionDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &JSExceptionDeprecated{}
|
||||
if msg.Name, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Message, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Payload, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeIntegrationEvent(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &IntegrationEvent{}
|
||||
|
|
@ -609,6 +633,60 @@ func DecodeSetNodeAttributeDictGlobal(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSInsertRule(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSInsertRule{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Rule, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSDeleteRule(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSDeleteRule{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeFetch(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &Fetch{}
|
||||
if msg.Method, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.URL, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Request, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Response, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Status, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeProfiler(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &Profiler{}
|
||||
|
|
@ -795,9 +873,9 @@ func DecodeSetNodeAttributeDict(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeResourceTimingDeprecated(reader BytesReader) (Message, error) {
|
||||
func DecodeResourceTimingDeprecatedDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &ResourceTimingDeprecated{}
|
||||
msg := &ResourceTimingDeprecatedDeprecated{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -921,6 +999,33 @@ func DecodeSetNodeFocus(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeLongTask(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &LongTask{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Context, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerType, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerSrc, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerId, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerName, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
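The decoders in this file only rely on the BytesReader surface (ReadUint, ReadInt, ReadString, ReadBoolean). For orientation, here is a toy reader with the same shape; the wire format it assumes, unsigned LEB128 varints and varint length-prefixed strings, is an inference from the 10-bytes-per-field buffer budget on the encoding side, not something the diff confirms.

package codec

import "io"

// toyReader mimics the BytesReader surface consumed by the decoders above.
// The varint/length-prefix wire format is assumed for illustration only.
type toyReader struct {
	data []byte
	pos  int
}

// ReadUint decodes an unsigned LEB128 varint.
func (r *toyReader) ReadUint() (uint64, error) {
	var v uint64
	for shift := uint(0); ; shift += 7 {
		if r.pos >= len(r.data) {
			return 0, io.ErrUnexpectedEOF
		}
		b := r.data[r.pos]
		r.pos++
		v |= uint64(b&0x7f) << shift
		if b < 0x80 {
			return v, nil
		}
	}
}

// ReadString reads a varint length prefix followed by that many raw bytes.
func (r *toyReader) ReadString() (string, error) {
	n, err := r.ReadUint()
	if err != nil {
		return "", err
	}
	end := r.pos + int(n)
	if end > len(r.data) {
		return "", io.ErrUnexpectedEOF
	}
	s := string(r.data[r.pos:end])
	r.pos = end
	return s, nil
}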
|
||||
|
||||
func DecodeSetNodeAttributeURLBased(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SetNodeAttributeURLBased{}
|
||||
|
|
@ -954,6 +1059,30 @@ func DecodeSetCSSDataURLBased(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeIssueEventDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &IssueEventDeprecated{}
|
||||
if msg.MessageID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Type, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContextString, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Context, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Payload, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeTechnicalInfo(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &TechnicalInfo{}
|
||||
|
|
@ -987,6 +1116,24 @@ func DecodeAssetCache(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSInsertRuleURLBased(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSInsertRuleURLBased{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Rule, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.BaseURL, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeMouseClick(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &MouseClick{}
|
||||
|
|
@ -1167,6 +1314,21 @@ func DecodeZustand(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeBatchMeta(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &BatchMeta{}
|
||||
if msg.PageNo, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.FirstIndex, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeBatchMetadata(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &BatchMetadata{}
|
||||
|
|
@ -1257,6 +1419,87 @@ func DecodeWSChannel(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeResourceTiming(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &ResourceTiming{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.TTFB, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.HeaderSize, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.EncodedBodySize, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.DecodedBodySize, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.URL, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Initiator, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.TransferredSize, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Cached, err = reader.ReadBoolean(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Queueing, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.DnsLookup, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.InitialConnection, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.SSL, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContentDownload, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Total, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Stalled, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeLongAnimationTask(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &LongAnimationTask{}
|
||||
if msg.Name, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.BlockingDuration, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.FirstUIEventTimestamp, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.StartTime, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Scripts, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeInputChange(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &InputChange{}
|
||||
|
|
@ -1314,9 +1557,9 @@ func DecodeUnbindNodes(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeResourceTiming(reader BytesReader) (Message, error) {
|
||||
func DecodeResourceTimingDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &ResourceTiming{}
|
||||
msg := &ResourceTimingDeprecated{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -1926,6 +2169,8 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeTimestamp(reader)
|
||||
case 1:
|
||||
return DecodeSessionStart(reader)
|
||||
case 3:
|
||||
return DecodeSessionEndDeprecated(reader)
|
||||
case 4:
|
||||
return DecodeSetPageLocationDeprecated(reader)
|
||||
case 5:
|
||||
|
|
@ -1968,6 +2213,8 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodePageLoadTiming(reader)
|
||||
case 24:
|
||||
return DecodePageRenderTiming(reader)
|
||||
case 25:
|
||||
return DecodeJSExceptionDeprecated(reader)
|
||||
case 26:
|
||||
return DecodeIntegrationEvent(reader)
|
||||
case 27:
|
||||
|
|
@ -1988,6 +2235,12 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeStringDictGlobal(reader)
|
||||
case 35:
|
||||
return DecodeSetNodeAttributeDictGlobal(reader)
|
||||
case 37:
|
||||
return DecodeCSSInsertRule(reader)
|
||||
case 38:
|
||||
return DecodeCSSDeleteRule(reader)
|
||||
case 39:
|
||||
return DecodeFetch(reader)
|
||||
case 40:
|
||||
return DecodeProfiler(reader)
|
||||
case 41:
|
||||
|
|
@ -2015,7 +2268,7 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
case 52:
|
||||
return DecodeSetNodeAttributeDict(reader)
|
||||
case 53:
|
||||
return DecodeResourceTimingDeprecated(reader)
|
||||
return DecodeResourceTimingDeprecatedDeprecated(reader)
|
||||
case 54:
|
||||
return DecodeConnectionInformation(reader)
|
||||
case 55:
|
||||
|
|
@ -2026,16 +2279,22 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeLoadFontFace(reader)
|
||||
case 58:
|
||||
return DecodeSetNodeFocus(reader)
|
||||
case 59:
|
||||
return DecodeLongTask(reader)
|
||||
case 60:
|
||||
return DecodeSetNodeAttributeURLBased(reader)
|
||||
case 61:
|
||||
return DecodeSetCSSDataURLBased(reader)
|
||||
case 62:
|
||||
return DecodeIssueEventDeprecated(reader)
|
||||
case 63:
|
||||
return DecodeTechnicalInfo(reader)
|
||||
case 64:
|
||||
return DecodeCustomIssue(reader)
|
||||
case 66:
|
||||
return DecodeAssetCache(reader)
|
||||
case 67:
|
||||
return DecodeCSSInsertRuleURLBased(reader)
|
||||
case 68:
|
||||
return DecodeMouseClick(reader)
|
||||
case 69:
|
||||
|
|
@ -2060,6 +2319,8 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeJSException(reader)
|
||||
case 79:
|
||||
return DecodeZustand(reader)
|
||||
case 80:
|
||||
return DecodeBatchMeta(reader)
|
||||
case 81:
|
||||
return DecodeBatchMetadata(reader)
|
||||
case 82:
|
||||
|
|
@ -2068,6 +2329,10 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeNetworkRequest(reader)
|
||||
case 84:
|
||||
return DecodeWSChannel(reader)
|
||||
case 85:
|
||||
return DecodeResourceTiming(reader)
|
||||
case 89:
|
||||
return DecodeLongAnimationTask(reader)
|
||||
case 112:
|
||||
return DecodeInputChange(reader)
|
||||
case 113:
|
||||
|
|
@ -2077,7 +2342,7 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
case 115:
|
||||
return DecodeUnbindNodes(reader)
|
||||
case 116:
|
||||
return DecodeResourceTiming(reader)
|
||||
return DecodeResourceTimingDeprecated(reader)
|
||||
case 117:
|
||||
return DecodeTabChange(reader)
|
||||
case 118:
|
||||
|
|
|
|||
|
|
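ReadMessage dispatches on a numeric type id to the per-message decoders added in this branch. A driver loop over a batch might look like the sketch below; the framing, each message preceded by its type id read via ReadUint, is an assumption based on the buf[0] = <type id> byte the encoders write, not something this hunk spells out.

// Sketch of a batch reader driving ReadMessage (framing assumed, see above).
func readBatch(reader BytesReader) ([]Message, error) {
	var msgs []Message
	for {
		t, err := reader.ReadUint()
		if err != nil {
			// For this sketch, any read error at a message boundary is
			// treated as the end of the batch.
			return msgs, nil
		}
		msg, err := ReadMessage(t, reader)
		if err != nil {
			return nil, err
		}
		msgs = append(msgs, msg)
	}
}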
@ -89,13 +89,15 @@ func (m *messageReaderImpl) Parse() (err error) {
|
|||
if err != nil {
|
||||
return fmt.Errorf("read message err: %s", err)
|
||||
}
|
||||
if m.msgType == MsgBatchMetadata {
|
||||
if m.msgType == MsgBatchMeta || m.msgType == MsgBatchMetadata {
|
||||
if len(m.list) > 0 {
|
||||
return fmt.Errorf("batch meta not at the start of batch")
|
||||
}
|
||||
switch message := msg.(type) {
|
||||
case *BatchMetadata:
|
||||
m.version = int(message.Version)
|
||||
case *BatchMeta:
|
||||
m.version = 0
|
||||
}
|
||||
if m.version != 1 {
|
||||
// Unsupported tracker version, reset reader
|
||||
|
|
|
|||
|
|
@ -79,34 +79,31 @@ func (e *handlersImpl) GetAll() []*api.Description {
|
|||
}
|
||||
}
|
||||
|
||||
func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) uint64 {
|
||||
func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) (ts uint64) {
|
||||
ts = uint64(req.Timestamp)
|
||||
if req.IsOffline {
|
||||
return uint64(req.Timestamp)
|
||||
return
|
||||
}
|
||||
ts := uint64(startTimeMili)
|
||||
if req.BufferDiff > 0 && req.BufferDiff < 5*60*1000 {
|
||||
ts -= req.BufferDiff
|
||||
}
|
||||
return ts
|
||||
}
|
||||
|
||||
func validateTrackerVersion(ver string) error {
|
||||
c, err := semver.NewConstraint(">=6.0.0")
|
||||
c, err := semver.NewConstraint(">=4.1.6")
|
||||
if err != nil {
|
||||
return err
|
||||
return
|
||||
}
|
||||
ver := req.TrackerVersion
|
||||
parts := strings.Split(ver, "-")
|
||||
if len(parts) > 1 {
|
||||
ver = parts[0]
|
||||
}
|
||||
v, err := semver.NewVersion(ver)
|
||||
if err != nil {
|
||||
return err
|
||||
return
|
||||
}
|
||||
if !c.Check(v) {
|
||||
return errors.New("unsupported tracker version")
|
||||
if c.Check(v) {
|
||||
ts = uint64(startTimeMili)
|
||||
if req.BufferDiff > 0 && req.BufferDiff < 5*60*1000 {
|
||||
ts -= req.BufferDiff
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return
|
||||
}
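Because the deleted validateTrackerVersion helper and the rewritten getSessionTimestamp are interleaved line by line in the hunk above, the new function is hard to read in place. Reconstructed from the added lines only (the hunk itself remains the authority), the inlined logic is:

func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) (ts uint64) {
	ts = uint64(req.Timestamp)
	if req.IsOffline {
		return
	}
	c, err := semver.NewConstraint(">=4.1.6")
	if err != nil {
		return
	}
	ver := req.TrackerVersion
	parts := strings.Split(ver, "-")
	if len(parts) > 1 {
		ver = parts[0] // drop pre-release/build suffixes before parsing
	}
	v, err := semver.NewVersion(ver)
	if err != nil {
		return
	}
	if c.Check(v) {
		// Trackers that satisfy the constraint report a buffer delay; shift the
		// server-side start time back by it when it is plausible (< 5 minutes).
		ts = uint64(startTimeMili)
		if req.BufferDiff > 0 && req.BufferDiff < 5*60*1000 {
			ts -= req.BufferDiff
		}
	}
	return
}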
|
||||
|
||||
func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -135,11 +132,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
|
|||
|
||||
// Add tracker version to context
|
||||
r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
|
||||
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
|
||||
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
|
||||
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
|
||||
return
|
||||
}
|
||||
|
||||
// Handler's logic
|
||||
if req.ProjectKey == nil {
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ type Task struct {
|
|||
Duration int
|
||||
Status string
|
||||
Path string
|
||||
tx pool.Tx
|
||||
tx *pool.Tx
|
||||
}
|
||||
|
||||
func (t *Task) HasToTrim() bool {
|
||||
|
|
@ -65,7 +65,7 @@ func (t *tasksImpl) Get() (task *Task, err error) {
|
|||
}
|
||||
}()
|
||||
|
||||
task = &Task{tx: pool.Tx{Tx: tx}}
|
||||
task = &Task{tx: tx}
|
||||
sql := `SELECT spot_id, crop, duration FROM spots.tasks WHERE status = 'pending' ORDER BY added_time FOR UPDATE SKIP LOCKED LIMIT 1`
|
||||
err = tx.TxQueryRow(sql).Scan(&task.SpotID, &task.Crop, &task.Duration)
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -52,6 +52,7 @@ func NewTranscoder(cfg *spot.Config, log logger.Logger, objStorage objectstorage
|
|||
tasks: NewTasks(conn),
|
||||
streams: NewStreams(log, conn, objStorage),
|
||||
spots: spots,
|
||||
metrics: metrics,
|
||||
}
|
||||
tnsc.prepareWorkers = workers.NewPool(2, 4, tnsc.prepare)
|
||||
tnsc.transcodeWorkers = workers.NewPool(2, 4, tnsc.transcode)
|
||||
|
|
|
|||
|
|
@ -8,7 +8,6 @@ ignore:
|
|||
- "**/*/build/**"
|
||||
- "**/*/.test.*"
|
||||
- "**/*/version.ts"
|
||||
review:
|
||||
poem: false
|
||||
review_status: false
|
||||
collapse_walkthrough: true
|
||||
comment:
|
||||
layout: "condensed_header, condensed_files, condensed_footer"
|
||||
hide_project_coverage: TRUE
|
||||
|
|
|
|||
18
ee/api/.gitignore
vendored
|
|
@ -187,7 +187,7 @@ Pipfile.lock
|
|||
/chalicelib/core/announcements.py
|
||||
/chalicelib/core/assist.py
|
||||
/chalicelib/core/authorizers.py
|
||||
/chalicelib/core/autocomplete/autocomplete.py
|
||||
/chalicelib/core/autocomplete
|
||||
/chalicelib/core/boarding.py
|
||||
/chalicelib/core/canvas.py
|
||||
/chalicelib/core/collaborations/__init__.py
|
||||
|
|
@ -211,22 +211,13 @@ Pipfile.lock
|
|||
/chalicelib/core/metadata.py
|
||||
/chalicelib/core/mobile.py
|
||||
/chalicelib/core/saved_search.py
|
||||
/chalicelib/core/sessions/sessions_pg.py
|
||||
/chalicelib/core/sessions/sessions_ch.py
|
||||
/chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
|
||||
/chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
|
||||
/chalicelib/core/sessions/sessions_assignments.py
|
||||
/chalicelib/core/sessions/sessions_metas.py
|
||||
/chalicelib/core/sessions/sessions_mobs.py
|
||||
/chalicelib/core/sessions/sessions_replay.py
|
||||
/chalicelib/core/sessions/sessions_search.py
|
||||
/chalicelib/core/sessions/performance_event.py
|
||||
/chalicelib/core/sessions/*.py
|
||||
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
|
||||
/chalicelib/core/sessions/unprocessed_sessions.py
|
||||
/chalicelib/core/metrics/modules
|
||||
/chalicelib/core/socket_ios.py
|
||||
/chalicelib/core/sourcemaps
|
||||
/chalicelib/core/tags.py
|
||||
/chalicelib/core/product_analytics
|
||||
/chalicelib/saml
|
||||
/chalicelib/utils/__init__.py
|
||||
/chalicelib/utils/args_transformer.py
|
||||
|
|
@ -288,4 +279,7 @@ Pipfile.lock
|
|||
/chalicelib/core/errors/errors_pg.py
|
||||
/chalicelib/core/errors/errors_ch.py
|
||||
/chalicelib/core/errors/errors_details.py
|
||||
/chalicelib/core/notes.py
|
||||
/chalicelib/utils/contextual_validators.py
|
||||
/routers/subs/product_analytics.py
|
||||
/schemas/product_analytics.py
|
||||
|
|
|
|||
|
|
@ -6,25 +6,23 @@ name = "pypi"
|
|||
[packages]
|
||||
urllib3 = "==2.3.0"
|
||||
requests = "==2.32.3"
|
||||
boto3 = "==1.36.12"
|
||||
boto3 = "==1.37.21"
|
||||
pyjwt = "==2.10.1"
|
||||
psycopg2-binary = "==2.9.10"
|
||||
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
|
||||
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
|
||||
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
|
||||
clickhouse-connect = "==0.8.15"
|
||||
elasticsearch = "==8.17.1"
|
||||
elasticsearch = "==8.17.2"
|
||||
jira = "==3.8.0"
|
||||
cachetools = "==5.5.1"
|
||||
fastapi = "==0.115.8"
|
||||
cachetools = "==5.5.2"
|
||||
fastapi = "==0.115.12"
|
||||
uvicorn = {extras = ["standard"], version = "==0.34.0"}
|
||||
gunicorn = "==23.0.0"
|
||||
python-decouple = "==3.8"
|
||||
pydantic = {extras = ["email"], version = "==2.10.6"}
|
||||
apscheduler = "==3.11.0"
|
||||
python3-saml = "==1.16.0"
|
||||
python-multipart = "==0.0.20"
|
||||
redis = "==5.2.1"
|
||||
azure-storage-blob = "==12.24.1"
|
||||
azure-storage-blob = "==12.25.0"
|
||||
|
||||
[dev-packages]
|
||||
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
|
|||
from crons import core_crons, ee_crons, core_dynamic_crons
|
||||
from routers import core, core_dynamic
|
||||
from routers import ee
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
|
||||
from routers.subs import v1_api_ee
|
||||
|
||||
if config("ENABLE_SSO", cast=bool, default=True):
|
||||
|
|
@ -150,9 +150,9 @@ app.include_router(spot.public_app)
|
|||
app.include_router(spot.app)
|
||||
app.include_router(spot.app_apikey)
|
||||
|
||||
app.include_router(product_anaytics.public_app)
|
||||
app.include_router(product_anaytics.app)
|
||||
app.include_router(product_anaytics.app_apikey)
|
||||
app.include_router(product_analytics.public_app, prefix="/ap")
|
||||
app.include_router(product_analytics.app, prefix="/ap")
|
||||
app.include_router(product_analytics.app_apikey, prefix="/ap")
|
||||
|
||||
if config("ENABLE_SSO", cast=bool, default=True):
|
||||
app.include_router(saml.public_app)
|
||||
|
|
|
|||
|
|
@ -1,11 +0,0 @@
|
|||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental autocomplete")
|
||||
from . import autocomplete_ch as autocomplete
|
||||
else:
|
||||
from . import autocomplete
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
from . import sessions_pg
|
||||
from . import sessions_pg as sessions_legacy
|
||||
from . import sessions_ch
|
||||
from . import sessions_search as sessions_search_legacy
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
logger.info(">>> Using experimental sessions search")
|
||||
from . import sessions_ch as sessions
|
||||
from . import sessions_search_exp as sessions_search
|
||||
else:
|
||||
from . import sessions_pg as sessions
|
||||
from . import sessions_search as sessions_search
|
||||
|
|
@ -1,9 +1,16 @@
|
|||
from typing import Union
|
||||
import logging
|
||||
import math
|
||||
import re
|
||||
import struct
|
||||
from decimal import Decimal
|
||||
from typing import Any, Union
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from decouple import config
|
||||
import logging
|
||||
from schemas import SearchEventOperator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -80,3 +87,162 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
|
|||
if event_type not in defs:
|
||||
raise Exception(f"unsupported EventType:{event_type}")
|
||||
return defs.get(event_type)
|
||||
|
||||
|
||||
# AI generated
|
||||
def simplify_clickhouse_type(ch_type: str) -> str:
|
||||
"""
|
||||
Simplify a ClickHouse data type name to a broader category like:
|
||||
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
|
||||
"""
|
||||
|
||||
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
|
||||
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
|
||||
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
|
||||
while True:
|
||||
match = pattern_wrappers.match(ch_type)
|
||||
if match:
|
||||
ch_type = match.group(2)
|
||||
else:
|
||||
break
|
||||
|
||||
# 2) Normalize (lowercase) for easier checks
|
||||
normalized_type = ch_type.lower()
|
||||
|
||||
# 3) Use pattern matching or direct checks for known categories
|
||||
# (You can adapt this as you see fit for your environment.)
|
||||
|
||||
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
|
||||
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
|
||||
return "int"
|
||||
|
||||
# Floats: Float32, Float64
|
||||
if re.match(r'^float(32|64)|double$', normalized_type):
|
||||
return "float"
|
||||
|
||||
# Decimal: Decimal(P, S)
|
||||
if normalized_type.startswith("decimal"):
|
||||
# return "decimal"
|
||||
return "float"
|
||||
|
||||
# Date/DateTime
|
||||
if normalized_type.startswith("date"):
|
||||
return "datetime"
|
||||
if normalized_type.startswith("datetime"):
|
||||
return "datetime"
|
||||
|
||||
# Strings: String, FixedString(N)
|
||||
if normalized_type.startswith("string"):
|
||||
return "string"
|
||||
if normalized_type.startswith("fixedstring"):
|
||||
return "string"
|
||||
|
||||
# UUID
|
||||
if normalized_type.startswith("uuid"):
|
||||
# return "uuid"
|
||||
return "string"
|
||||
|
||||
# Enums: Enum8(...) or Enum16(...)
|
||||
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
|
||||
# return "enum"
|
||||
return "string"
|
||||
|
||||
# Arrays: Array(T)
|
||||
if normalized_type.startswith("array"):
|
||||
return "array"
|
||||
|
||||
# Tuples: Tuple(T1, T2, ...)
|
||||
if normalized_type.startswith("tuple"):
|
||||
return "tuple"
|
||||
|
||||
# Map(K, V)
|
||||
if normalized_type.startswith("map"):
|
||||
return "map"
|
||||
|
||||
# Nested(...)
|
||||
if normalized_type.startswith("nested"):
|
||||
return "nested"
|
||||
|
||||
# If we didn't match above, just return the original type in lowercase
|
||||
return normalized_type
|
||||
|
||||
|
||||
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
|
||||
"""
|
||||
Takes a list of ClickHouse types and returns a list of simplified types
|
||||
by calling `simplify_clickhouse_type` on each.
|
||||
"""
|
||||
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
|
||||
|
||||
|
||||
def get_sub_condition(col_name: str, val_name: str,
|
||||
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str:
|
||||
if operator == SearchEventOperator.PATTERN:
|
||||
return f"match({col_name}, %({val_name})s)"
|
||||
op = sh.get_sql_operator(operator)
|
||||
return f"{col_name} {op} %({val_name})s"
|
||||
|
||||
|
||||
def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str:
|
||||
if value is None or len(value) == 0:
|
||||
return ""
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT):
|
||||
return best_clickhouse_type(value)
|
||||
return data_type.capitalize()
|
||||
|
||||
|
||||
# (type_name, minimum, maximum) – ordered by increasing size
|
||||
_INT_RANGES = [
|
||||
("Int8", -128, 127),
|
||||
("UInt8", 0, 255),
|
||||
("Int16", -32_768, 32_767),
|
||||
("UInt16", 0, 65_535),
|
||||
("Int32", -2_147_483_648, 2_147_483_647),
|
||||
("UInt32", 0, 4_294_967_295),
|
||||
("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807),
|
||||
("UInt64", 0, 18_446_744_073_709_551_615),
|
||||
]
|
||||
|
||||
|
||||
def best_clickhouse_type(value):
|
||||
"""
|
||||
Return the most compact ClickHouse numeric type that can store *value* losslessly.
|
||||
|
||||
"""
|
||||
# Treat bool like tiny int
|
||||
if isinstance(value, bool):
|
||||
value = int(value)
|
||||
|
||||
# --- Integers ---
|
||||
if isinstance(value, int):
|
||||
for name, lo, hi in _INT_RANGES:
|
||||
if lo <= value <= hi:
|
||||
return name
|
||||
# Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal
|
||||
return "Int128"
|
||||
|
||||
# --- Decimal.Decimal (exact) ---
|
||||
if isinstance(value, Decimal):
|
||||
# ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits.
|
||||
digits = len(value.as_tuple().digits)
|
||||
if digits <= 9:
|
||||
return "Decimal32"
|
||||
elif digits <= 18:
|
||||
return "Decimal64"
|
||||
else:
|
||||
return "Decimal128"
|
||||
|
||||
# --- Floats ---
|
||||
if isinstance(value, float):
|
||||
if not math.isfinite(value):
|
||||
return "Float64" # inf / nan → always Float64
|
||||
|
||||
# Check if a round-trip through 32-bit float preserves the bit pattern
|
||||
packed = struct.pack("f", value)
|
||||
if struct.unpack("f", packed)[0] == value:
|
||||
return "Float32"
|
||||
return "Float64"
|
||||
|
||||
raise TypeError(f"Unsupported type: {type(value).__name__}")
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ rm -rf ./build_crons.sh
|
|||
rm -rf ./chalicelib/core/announcements.py
|
||||
rm -rf ./chalicelib/core/assist.py
|
||||
rm -rf ./chalicelib/core/authorizers.py
|
||||
rm -rf ./chalicelib/core/autocomplete/autocomplete.py
|
||||
rm -rf ./chalicelib/core/autocomplete
|
||||
rm -rf ./chalicelib/core/collaborations/__init__.py
|
||||
rm -rf ./chalicelib/core/collaborations/collaboration_base.py
|
||||
rm -rf ./chalicelib/core/collaborations/collaboration_msteams.py
|
||||
|
|
@ -32,23 +32,14 @@ rm -rf ./chalicelib/core/log_tools
|
|||
rm -rf ./chalicelib/core/metadata.py
|
||||
rm -rf ./chalicelib/core/mobile.py
|
||||
rm -rf ./chalicelib/core/saved_search.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_pg.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_ch.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_assignments.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_metas.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_mobs.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_replay.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_search.py
|
||||
rm -rf ./chalicelib/core/sessions/performance_event.py
|
||||
rm -rf ./chalicelib/core/sessions/*.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
|
||||
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
|
||||
rm -rf ./chalicelib/core/metrics/modules
|
||||
rm -rf ./chalicelib/core/socket_ios.py
|
||||
rm -rf ./chalicelib/core/sourcemaps
|
||||
rm -rf ./chalicelib/core/user_testing.py
|
||||
rm -rf ./chalicelib/core/tags.py
|
||||
rm -rf ./chalicelib/core/product_analytics
|
||||
rm -rf ./chalicelib/saml
|
||||
rm -rf ./chalicelib/utils/__init__.py
|
||||
rm -rf ./chalicelib/utils/args_transformer.py
|
||||
|
|
@ -108,4 +99,7 @@ rm -rf ./chalicelib/core/errors/modules
|
|||
rm -rf ./chalicelib/core/errors/errors_pg.py
|
||||
rm -rf ./chalicelib/core/errors/errors_ch.py
|
||||
rm -rf ./chalicelib/core/errors/errors_details.py
|
||||
rm -rf ./chalicelib/core/notes.py
|
||||
rm -rf ./chalicelib/utils/contextual_validators.py
|
||||
rm -rf ./routers/subs/product_analytics.py
|
||||
rm -rf ./schemas/product_analytics.py
|
||||
|
|
@ -1,19 +1,18 @@
|
|||
urllib3==2.3.0
|
||||
urllib3==2.4.0
|
||||
requests==2.32.3
|
||||
boto3==1.36.12
|
||||
boto3==1.38.10
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
psycopg[pool,binary]==3.2.7
|
||||
clickhouse-connect==0.8.17
|
||||
elasticsearch==9.0.1
|
||||
jira==3.8.0
|
||||
cachetools==5.5.1
|
||||
cachetools==5.5.2
|
||||
|
||||
fastapi==0.115.8
|
||||
uvicorn[standard]==0.34.0
|
||||
fastapi==0.115.12
|
||||
uvicorn[standard]==0.34.2
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.10.6
|
||||
pydantic[email]==2.11.4
|
||||
apscheduler==3.11.0
|
||||
|
||||
azure-storage-blob==12.24.1
|
||||
azure-storage-blob==12.25.1
|
||||
|
|
|
|||
|
|
@ -1,19 +1,18 @@
|
|||
urllib3==2.3.0
|
||||
requests==2.32.3
|
||||
boto3==1.36.12
|
||||
boto3==1.37.21
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
psycopg[pool,binary]==3.2.7
|
||||
clickhouse-connect==0.8.17
|
||||
elasticsearch==9.0.1
|
||||
jira==3.8.0
|
||||
cachetools==5.5.1
|
||||
cachetools==5.5.2
|
||||
|
||||
fastapi==0.115.8
|
||||
fastapi==0.115.12
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.10.6
|
||||
pydantic[email]==2.11.4
|
||||
apscheduler==3.11.0
|
||||
|
||||
redis==5.2.1
|
||||
azure-storage-blob==12.24.1
|
||||
redis==6.0.0
|
||||
azure-storage-blob==12.25.1
|
||||
|
|
|
|||
|
|
@ -1,28 +1,26 @@
|
|||
urllib3==2.3.0
|
||||
urllib3==2.4.0
|
||||
requests==2.32.3
|
||||
boto3==1.36.12
|
||||
boto3==1.38.10
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
psycopg[pool,binary]==3.2.7
|
||||
clickhouse-connect==0.8.17
|
||||
elasticsearch==9.0.1
|
||||
jira==3.8.0
|
||||
cachetools==5.5.1
|
||||
cachetools==5.5.2
|
||||
|
||||
fastapi==0.115.8
|
||||
uvicorn[standard]==0.34.0
|
||||
fastapi==0.115.12
|
||||
uvicorn[standard]==0.34.2
|
||||
gunicorn==23.0.0
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.10.6
|
||||
pydantic[email]==2.11.4
|
||||
apscheduler==3.11.0
|
||||
|
||||
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
|
||||
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
|
||||
python3-saml==1.16.0
|
||||
--no-binary=lxml
|
||||
python3-saml==1.16.0 --no-binary=lxml
|
||||
python-multipart==0.0.20
|
||||
|
||||
redis==5.2.1
|
||||
redis==6.0.0
|
||||
#confluent-kafka==2.1.0
|
||||
azure-storage-blob==12.24.1
|
||||
azure-storage-blob==12.25.1
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from chalicelib.core import webhook
|
|||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.core.errors import errors, errors_details
|
||||
from chalicelib.core.metrics import heatmaps
|
||||
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
|
||||
from chalicelib.core.sessions import sessions, sessions_replay, sessions_favorite, sessions_viewed, \
|
||||
sessions_assignments, unprocessed_sessions, sessions_search
|
||||
from chalicelib.utils import SAML2_helper
|
||||
from chalicelib.utils import captcha, smtp
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
from .schemas import *
|
||||
from .schemas_ee import *
|
||||
from .assist_stats_schema import *
|
||||
from .product_analytics import *
|
||||
from . import overrides as _overrides
|
||||
from .schemas import _PaginatedSchema as PaginatedSchema
|
||||
|
|
@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator
|
|||
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from . import schemas
|
||||
from .overrides import BaseModel, Enum, ORUnion
|
||||
from .overrides import BaseModel, Enum
|
||||
from .transformers_validators import remove_whitespace
|
||||
|
||||
|
||||
|
|
@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
|
|||
return values
|
||||
|
||||
|
||||
class SessionModel(BaseModel):
|
||||
duration: int
|
||||
errorsCount: int
|
||||
eventsCount: int
|
||||
favorite: bool = Field(default=False)
|
||||
issueScore: int
|
||||
issueTypes: List[schemas.IssueType] = Field(default=[])
|
||||
metadata: dict = Field(default={})
|
||||
pagesCount: int
|
||||
platform: str
|
||||
projectId: int
|
||||
sessionId: str
|
||||
startTs: int
|
||||
timezone: Optional[str]
|
||||
userAnonymousId: Optional[str]
|
||||
userBrowser: str
|
||||
userCity: str
|
||||
userCountry: str
|
||||
userDevice: Optional[str]
|
||||
userDeviceType: str
|
||||
userId: Optional[str]
|
||||
userOs: str
|
||||
userState: str
|
||||
userUuid: str
|
||||
viewed: bool = Field(default=False)
|
||||
|
||||
|
||||
class AssistRecordUpdatePayloadSchema(BaseModel):
|
||||
name: str = Field(..., min_length=1)
|
||||
_transform_name = field_validator('name', mode="before")(remove_whitespace)
|
||||
|
|
|
|||
5
ee/assist-server/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
.idea
|
||||
node_modules
|
||||
npm-debug.log
|
||||
.cache
|
||||
*.mmdb
|
||||
24
ee/assist-server/Dockerfile
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
ARG ARCH=amd64
|
||||
|
||||
FROM --platform=linux/$ARCH node:23-alpine
|
||||
LABEL Maintainer="Zavorotynskiy Alexander <zavorotynskiy@pm.me>"
|
||||
RUN apk add --no-cache tini git libc6-compat
|
||||
ARG envarg
|
||||
ENV ENTERPRISE_BUILD=${envarg} \
|
||||
MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \
|
||||
PRIVATE_ENDPOINTS=false \
|
||||
LISTEN_PORT=9001 \
|
||||
ERROR=1 \
|
||||
NODE_ENV=production
|
||||
WORKDIR /work
|
||||
COPY package.json .
|
||||
COPY package-lock.json .
|
||||
RUN npm install
|
||||
COPY . .
|
||||
|
||||
RUN adduser -u 1001 openreplay -D
|
||||
USER 1001
|
||||
ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-City.mmdb $MAXMINDDB_FILE
|
||||
|
||||
ENTRYPOINT ["/sbin/tini", "--"]
|
||||
CMD npm start
|
||||
168
ee/assist-server/app/assist.js
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
const jwt = require('jsonwebtoken');
|
||||
const uaParser = require('ua-parser-js');
|
||||
const {geoip} = require('./geoIP');
|
||||
const {logger} = require('./logger');
|
||||
|
||||
let PROJECT_KEY_LENGTH = parseInt(process.env.PROJECT_KEY_LENGTH) || 20;
|
||||
|
||||
const IDENTITIES = {agent: 'agent', session: 'session'};
|
||||
const EVENTS_DEFINITION = {
|
||||
listen: {
|
||||
UPDATE_EVENT: "UPDATE_SESSION", // tab become active/inactive, page title change, changed session object (rare case), call start/end
|
||||
CONNECT_ERROR: "connect_error",
|
||||
CONNECT_FAILED: "connect_failed",
|
||||
ERROR: "error"
|
||||
},
|
||||
//The following list of events will be only emitted by the server
|
||||
server: {
|
||||
UPDATE_SESSION: "SERVER_UPDATE_SESSION"
|
||||
}
|
||||
};
|
||||
EVENTS_DEFINITION.emit = {
|
||||
NEW_AGENT: "NEW_AGENT",
|
||||
NO_AGENTS: "NO_AGENT",
|
||||
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
|
||||
AGENTS_CONNECTED: "AGENTS_CONNECTED",
|
||||
NO_SESSIONS: "SESSION_DISCONNECTED",
|
||||
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
|
||||
SESSION_RECONNECTED: "SESSION_RECONNECTED",
|
||||
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
|
||||
};
|
||||
|
||||
const BASE_sessionInfo = {
|
||||
"pageTitle": "Page",
|
||||
"active": false,
|
||||
"live": true,
|
||||
"sessionID": "0",
|
||||
"metadata": {},
|
||||
"userID": "",
|
||||
"userUUID": "",
|
||||
"projectKey": "",
|
||||
"revID": "",
|
||||
"timestamp": 0,
|
||||
"trackerVersion": "",
|
||||
"isSnippet": true,
|
||||
"userOs": "",
|
||||
"userBrowser": "",
|
||||
"userBrowserVersion": "",
|
||||
"userDevice": "",
|
||||
"userDeviceType": "",
|
||||
"userCountry": "",
|
||||
"userState": "",
|
||||
"userCity": "",
|
||||
"projectId": 0
|
||||
};
|
||||
|
||||
const extractPeerId = (peerId) => {
|
||||
const parts = peerId.split("-");
|
||||
if (parts.length < 2 || parts.length > 3) {
|
||||
logger.debug(`Invalid peerId format: ${peerId}`);
|
||||
return {};
|
||||
}
|
||||
if (PROJECT_KEY_LENGTH > 0 && parts[0].length !== PROJECT_KEY_LENGTH) {
|
||||
logger.debug(`Invalid project key length in peerId: ${peerId}`);
|
||||
return {};
|
||||
}
|
||||
const [projectKey, sessionId, tabId = generateRandomTabId()] = parts;
|
||||
return { projectKey, sessionId, tabId };
|
||||
};
|
||||
|
||||
const generateRandomTabId = () => (Math.random() + 1).toString(36).substring(2);
|
||||
|
||||
function processPeerInfo(socket) {
|
||||
socket._connectedAt = new Date();
|
||||
const { projectKey, sessionId, tabId } = extractPeerId(socket.handshake.query.peerId || "");
|
||||
Object.assign(socket.handshake.query, {
|
||||
roomId: projectKey && sessionId ? `${projectKey}-${sessionId}` : null,
|
||||
projectKey,
|
||||
sessId: sessionId,
|
||||
tabId
|
||||
});
|
||||
logger.debug(`Connection details: projectKey:${projectKey}, sessionId:${sessionId}, tabId:${tabId}, roomId:${socket.handshake.query.roomId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session info from the handshake and populates the socket with it
* @param {Socket} socket - the socket to populate
|
||||
* */
|
||||
const extractSessionInfo = function (socket) {
|
||||
if (socket.handshake.query.sessionInfo !== undefined) {
|
||||
logger.debug(`received headers: ${socket.handshake.headers}`);
|
||||
|
||||
socket.handshake.query.sessionInfo = JSON.parse(socket.handshake.query.sessionInfo);
|
||||
socket.handshake.query.sessionInfo = {...BASE_sessionInfo, ...socket.handshake.query.sessionInfo};
|
||||
|
||||
let ua = uaParser(socket.handshake.headers['user-agent']);
|
||||
socket.handshake.query.sessionInfo.userOs = ua.os.name || null;
|
||||
socket.handshake.query.sessionInfo.userBrowser = ua.browser.name || null;
|
||||
socket.handshake.query.sessionInfo.userBrowserVersion = ua.browser.version || null;
|
||||
socket.handshake.query.sessionInfo.userDevice = ua.device.model || null;
|
||||
socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop';
|
||||
socket.handshake.query.sessionInfo.userCountry = null;
|
||||
socket.handshake.query.sessionInfo.userState = null;
|
||||
socket.handshake.query.sessionInfo.userCity = null;
|
||||
if (geoip() !== null) {
|
||||
logger.debug(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`);
|
||||
try {
|
||||
let ip = socket.handshake.headers['x-forwarded-for'] || socket.handshake.address;
|
||||
ip = ip.split(",")[0];
|
||||
let info = geoip().city(ip);
|
||||
socket.handshake.query.sessionInfo.userCountry = info.country.isoCode;
|
||||
socket.handshake.query.sessionInfo.userCity = info.city.names.en;
|
||||
socket.handshake.query.sessionInfo.userState = info.subdivisions.length > 0 ? info.subdivisions[0].names.en : null;
|
||||
} catch (e) {
|
||||
logger.debug(`geoip-country failed: ${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function errorHandler(listenerName, error) {
|
||||
logger.error(`Error detected from ${listenerName}\n${error}`);
|
||||
}
|
||||
|
||||
const JWT_TOKEN_PREFIX = "Bearer ";
|
||||
|
||||
function check(socket, next) {
|
||||
if (socket.handshake.query.identity === IDENTITIES.session) {
|
||||
return next();
|
||||
}
|
||||
if (socket.handshake.query.peerId && socket.handshake.auth && socket.handshake.auth.token) {
|
||||
let token = socket.handshake.auth.token;
|
||||
if (token.startsWith(JWT_TOKEN_PREFIX)) {
|
||||
token = token.substring(JWT_TOKEN_PREFIX.length);
|
||||
}
|
||||
jwt.verify(token, process.env.ASSIST_JWT_SECRET, (err, decoded) => {
|
||||
logger.debug(`JWT payload: ${decoded}`);
|
||||
if (err) {
|
||||
logger.debug(err);
|
||||
return next(new Error('Authentication error'));
|
||||
}
|
||||
const {projectKey, sessionId} = extractPeerId(socket.handshake.query.peerId);
|
||||
if (!projectKey || !sessionId) {
|
||||
logger.debug(`Missing attribute: projectKey:${projectKey}, sessionId:${sessionId}`);
|
||||
return next(new Error('Authentication error'));
|
||||
}
|
||||
if (String(projectKey) !== String(decoded.projectKey) || String(sessionId) !== String(decoded.sessionId)) {
|
||||
logger.debug(`Trying to access projectKey:${projectKey} instead of ${decoded.projectKey} or
|
||||
to sessionId:${sessionId} instead of ${decoded.sessionId}`);
|
||||
return next(new Error('Authorization error'));
|
||||
}
|
||||
socket.decoded = decoded;
|
||||
return next();
|
||||
});
|
||||
} else {
|
||||
logger.debug(`something missing in handshake: ${socket.handshake}`);
|
||||
return next(new Error('Authentication error'));
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
processPeerInfo,
|
||||
extractPeerId,
|
||||
extractSessionInfo,
|
||||
EVENTS_DEFINITION,
|
||||
IDENTITIES,
|
||||
errorHandler,
|
||||
authorizer: {check}
|
||||
};
|
||||
109
ee/assist-server/app/cache.js
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
const {logger} = require('./logger');
|
||||
const {createClient} = require("redis");
|
||||
const crypto = require("crypto");
|
||||
|
||||
let redisClient;
|
||||
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
|
||||
redisClient = createClient({url: REDIS_URL});
|
||||
redisClient.on("error", (error) => logger.error(`Redis cache error : ${error}`));
|
||||
void redisClient.connect();
|
||||
|
||||
function generateNodeID() {
|
||||
const buffer = crypto.randomBytes(8);
|
||||
return "node_"+buffer.readBigUInt64BE(0).toString();
|
||||
}
|
||||
|
||||
const PING_INTERVAL = parseInt(process.env.PING_INTERVAL_SECONDS) || 25;
|
||||
const CACHE_REFRESH_INTERVAL = parseInt(process.env.CACHE_REFRESH_INTERVAL_SECONDS) || 10;
|
||||
const pingInterval = PING_INTERVAL + PING_INTERVAL/2;
|
||||
const cacheRefreshInterval = CACHE_REFRESH_INTERVAL + CACHE_REFRESH_INTERVAL/2;
|
||||
const cacheRefreshIntervalMs = CACHE_REFRESH_INTERVAL * 1000;
|
||||
let lastCacheUpdateTime = 0;
|
||||
let cacheRefresher = null;
|
||||
const nodeID = process.env.HOSTNAME || generateNodeID();
|
||||
|
||||
const addSessionToCache = async function (sessionID, sessionData) {
|
||||
try {
|
||||
await redisClient.set(`active_sessions:${sessionID}`, JSON.stringify(sessionData), 'EX', pingInterval);
|
||||
logger.debug(`Session ${sessionID} stored in Redis`);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
const renewSession = async function (sessionID){
|
||||
try {
|
||||
await redisClient.expire(`active_sessions:${sessionID}`, pingInterval);
|
||||
logger.debug(`Session ${sessionID} renewed in Redis`);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
const getSessionFromCache = async function (sessionID) {
|
||||
try {
|
||||
const sessionData = await redisClient.get(`active_sessions:${sessionID}`);
|
||||
if (sessionData) {
|
||||
logger.debug(`Session ${sessionID} retrieved from Redis`);
|
||||
return JSON.parse(sessionData);
|
||||
}
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const removeSessionFromCache = async function (sessionID) {
|
||||
try {
|
||||
await redisClient.del(`active_sessions:${sessionID}`);
|
||||
logger.debug(`Session ${sessionID} removed from Redis`);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
const setNodeSessions = async function (nodeID, sessionIDs) {
|
||||
try {
|
||||
await redisClient.set(`node:${nodeID}:sessions`, JSON.stringify(sessionIDs), 'EX', cacheRefreshInterval);
|
||||
logger.debug(`Node ${nodeID} sessions stored in Redis`);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
function startCacheRefresher(io) {
|
||||
if (cacheRefresher) clearInterval(cacheRefresher);
|
||||
|
||||
cacheRefresher = setInterval(async () => {
|
||||
const now = Date.now();
|
||||
if (now - lastCacheUpdateTime < cacheRefreshIntervalMs) {
|
||||
return;
|
||||
}
|
||||
logger.debug('Background refresh triggered');
|
||||
try {
|
||||
const startTime = performance.now();
|
||||
const sessionIDs = new Set();
|
||||
const result = await io.fetchSockets();
|
||||
result.forEach((socket) => {
|
||||
if (socket.handshake.query.sessId) {
|
||||
sessionIDs.add(socket.handshake.query.sessId);
|
||||
}
|
||||
})
|
||||
await setNodeSessions(nodeID, Array.from(sessionIDs));
|
||||
lastCacheUpdateTime = now;
|
||||
const duration = performance.now() - startTime;
|
||||
logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
|
||||
} catch (error) {
|
||||
logger.error(`Background refresh error: ${error}`);
|
||||
}
|
||||
}, cacheRefreshIntervalMs / 2);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
addSessionToCache,
|
||||
renewSession,
|
||||
getSessionFromCache,
|
||||
removeSessionFromCache,
|
||||
startCacheRefresher,
|
||||
}
|
||||
21
ee/assist-server/app/geoIP.js
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
|
||||
const {logger} = require('./logger');
|
||||
|
||||
let geoip = null;
|
||||
if (process.env.MAXMINDDB_FILE !== undefined) {
|
||||
geoip2Reader.open(process.env.MAXMINDDB_FILE, {})
|
||||
.then(reader => {
|
||||
geoip = reader;
|
||||
})
|
||||
.catch(error => {
|
||||
logger.error(`Error while opening the MAXMINDDB_FILE, err: ${error}`);
|
||||
});
|
||||
} else {
|
||||
logger.error("!!! please provide a valid value for MAXMINDDB_FILE env var.");
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
geoip: () => {
|
||||
return geoip;
|
||||
}
|
||||
}
|
||||
23
ee/assist-server/app/logger.js
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
const winston = require('winston');
|
||||
|
||||
const isDebugMode = process.env.debug === "1";
|
||||
const logLevel = isDebugMode ? 'debug' : 'info';
|
||||
|
||||
const logger = winston.createLogger({
|
||||
level: logLevel,
|
||||
format: winston.format.combine(
|
||||
winston.format.timestamp({
|
||||
format: 'YYYY-MM-DD HH:mm:ss.SSS' // The same format as in backend services
|
||||
}),
|
||||
winston.format.errors({stack: true}),
|
||||
winston.format.json()
|
||||
),
|
||||
defaultMeta: {service: process.env.SERVICE_NAME || 'assist'},
|
||||
transports: [
|
||||
new winston.transports.Console(),
|
||||
],
|
||||
});
|
||||
|
||||
module.exports = {
|
||||
logger,
|
||||
}
|
||||
254
ee/assist-server/app/socket.js
Normal file
|
|
@ -0,0 +1,254 @@
|
|||
const {
    processPeerInfo,
    IDENTITIES,
    EVENTS_DEFINITION,
    extractSessionInfo,
    errorHandler
} = require("./assist");
const {
    addSessionToCache,
    renewSession,
    removeSessionFromCache
} = require('./cache');
const {
    logger
} = require('./logger');
const deepMerge = require('@fastify/deepmerge')({all: true});

let io;

const setSocketIOServer = function (server) {
    io = server;
}

function sendFrom(from, to, eventName, ...data) {
    from.to(to).emit(eventName, ...data);
}

function sendTo(to, eventName, ...data) {
    sendFrom(io, to, eventName, ...data);
}

const fetchSockets = async function (roomID) {
    if (!io) {
        return [];
    }
    try {
        if (roomID) {
            return await io.in(roomID).fetchSockets();
        } else {
            return await io.fetchSockets();
        }
    } catch (error) {
        logger.error('Error fetching sockets:', error);
        return [];
    }
}

const findSessionSocketId = async (roomId, tabId) => {
    let pickFirstSession = tabId === undefined;
    const connected_sockets = await fetchSockets(roomId);
    for (let socket of connected_sockets) {
        if (socket.handshake.query.identity === IDENTITIES.session) {
            if (pickFirstSession) {
                return socket.id;
            } else if (socket.handshake.query.tabId === tabId) {
                return socket.id;
            }
        }
    }
    return null;
};

async function getRoomData(roomID) {
    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
    const connected_sockets = await fetchSockets(roomID);
    if (connected_sockets.length > 0) {
        for (let socket of connected_sockets) {
            if (socket.handshake.query.identity === IDENTITIES.session) {
                tabsCount++;
                tabIDs.push(socket.handshake.query.tabId);
            } else {
                agentsCount++;
                agentIDs.push(socket.id);
            }
        }
    } else {
        tabsCount = -1;
        agentsCount = -1;
    }
    return {tabsCount, agentsCount, tabIDs, agentIDs};
}

async function onConnect(socket) {
    logger.debug(`A new client:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
    // Drop unknown socket.io connections
    if (socket.handshake.query.identity === undefined || socket.handshake.query.peerId === undefined || socket.handshake.query.sessionInfo === undefined) {
        logger.debug(`something is undefined, refusing connection`);
        return socket.disconnect();
    }
    processPeerInfo(socket);

    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (socket.handshake.query.identity === IDENTITIES.session) {
        // Check if a session with the same tabID is already connected; if so, refuse the new connection
        if (tabsCount > 0) {
            for (let tab of tabIDs) {
                if (tab === socket.handshake.query.tabId) {
                    logger.debug(`session already connected, refusing new connection, peerId: ${socket.handshake.query.peerId}`);
                    sendTo(socket.id, EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
                    return socket.disconnect();
                }
            }
        }
        extractSessionInfo(socket);
        if (tabsCount < 0) {
            // New session creates a new room
        }
        // Inform all connected agents about the reconnected session
        if (agentsCount > 0) {
            logger.debug(`notifying new session about agent-existence`);
            sendTo(socket.id, EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
            sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
        }
    } else if (tabsCount <= 0) {
        logger.debug(`notifying new agent about no SESSIONS with peerId:${socket.handshake.query.peerId}`);
        sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }

    await socket.join(socket.handshake.query.roomId);
    logger.debug(`${socket.id} joined room:${socket.handshake.query.roomId}, as:${socket.handshake.query.identity}, connections:${agentsCount + tabsCount + 1}`)

    // Add session to cache
    if (socket.handshake.query.identity === IDENTITIES.session) {
        await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);
    }

    if (socket.handshake.query.identity === IDENTITIES.agent) {
        if (socket.handshake.query.agentInfo !== undefined) {
            socket.handshake.query.agentInfo = JSON.parse(socket.handshake.query.agentInfo);
            socket.handshake.query.agentID = socket.handshake.query.agentInfo.id;
        }
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
    }

    socket.conn.on("packet", (packet) => {
        if (packet.type === 'pong') {
            renewSession(socket.handshake.query.sessId);
        }
    });

    // Set disconnect handler
    socket.on('disconnect', () => onDisconnect(socket));

    // Handle update event
    socket.on(EVENTS_DEFINITION.listen.UPDATE_EVENT, (...args) => onUpdateEvent(socket, ...args));

    // Handle webrtc events
    socket.on(EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, (...args) => onWebrtcAgentHandler(socket, ...args));

    // Handle errors
    socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
    socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
    socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));

    // Handle all other events (usually DOM mutations and user actions)
    socket.onAny((eventName, ...args) => onAny(socket, eventName, ...args));
}

async function onDisconnect(socket) {
    logger.debug(`${socket.id} disconnected from ${socket.handshake.query.roomId}`);

    if (socket.handshake.query.identity === IDENTITIES.agent) {
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
    }
    logger.debug("checking for number of connected agents and sessions");
    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (tabsCount <= 0) {
        await removeSessionFromCache(socket.handshake.query.sessId);
    }

    if (tabsCount === -1 && agentsCount === -1) {
        logger.debug(`room not found: ${socket.handshake.query.roomId}`);
        return;
    }
    if (tabsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no SESSIONS`);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }
    if (agentsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no AGENTS`);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_AGENTS);
    }
}

async function onUpdateEvent(socket, ...args) {
    logger.debug(`${socket.id} sent update event.`);
    if (socket.handshake.query.identity !== IDENTITIES.session) {
        logger.debug('Ignoring update event.');
        return
    }

    args[0] = updateSessionData(socket, args[0])
    socket.handshake.query.sessionInfo = deepMerge(socket.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});

    // update session cache
    await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);

    // Update sessionInfo for all agents in the room
    const connected_sockets = await fetchSockets(socket.handshake.query.roomId);
    for (let item of connected_sockets) {
        if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
            item.handshake.query.sessionInfo = deepMerge(item.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
        } else if (item.handshake.query.identity === IDENTITIES.agent) {
            sendFrom(socket, item.id, EVENTS_DEFINITION.emit.UPDATE_EVENT, args[0]);
        }
    }
}

async function onWebrtcAgentHandler(socket, ...args) {
    if (socket.handshake.query.identity === IDENTITIES.agent) {
        const agentIdToConnect = args[0]?.data?.toAgentId;
        logger.debug(`${socket.id} sent webrtc event to agent:${agentIdToConnect}`);
        if (agentIdToConnect && socket.handshake.sessionData.AGENTS_CONNECTED.includes(agentIdToConnect)) {
            sendFrom(socket, agentIdToConnect, EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, args[0]);
        }
    }
}

async function onAny(socket, eventName, ...args) {
    if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
        logger.debug(`received event:${eventName}, should be handled by another listener, stopping onAny.`);
        return
    }
    args[0] = updateSessionData(socket, args[0])
    if (socket.handshake.query.identity === IDENTITIES.session) {
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to room:${socket.handshake.query.roomId}`);
        sendFrom(socket, socket.handshake.query.roomId, eventName, args[0]);
    } else {
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to session of room:${socket.handshake.query.roomId}`);
        let socketId = await findSessionSocketId(socket.handshake.query.roomId, args[0]?.meta?.tabId);
        if (socketId === null) {
            logger.debug(`session not found for:${socket.handshake.query.roomId}`);
            sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
        } else {
            logger.debug("message sent");
            sendTo(socket.id, eventName, socket.id, args[0]);
        }
    }
}

// Back compatibility (add top layer with meta information)
function updateSessionData(socket, sessionData) {
    if (sessionData?.meta === undefined && socket.handshake.query.identity === IDENTITIES.session) {
        sessionData = {meta: {tabId: socket.handshake.query.tabId, version: 1}, data: sessionData};
    }
    return sessionData
}

module.exports = {
    onConnect,
    setSocketIOServer,
}
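For readers unfamiliar with the back-compatibility layer above: updateSessionData only wraps a payload when it arrives from a session socket without a meta field, using the tabId from the handshake query and a fixed version of 1. A rough illustration of that transformation, with made-up field names, is the sketch below (not part of the diff).

// Illustration of the wrapping performed by updateSessionData; pageTitle and tab-1 are invented values.
const legacyPayload = { pageTitle: "Checkout" };   // payload sent without a meta layer
const tabId = "tab-1";                             // taken from socket.handshake.query.tabId
const wrapped = { meta: { tabId: tabId, version: 1 }, data: legacyPayload };
console.log(wrapped);
// -> { meta: { tabId: 'tab-1', version: 1 }, data: { pageTitle: 'Checkout' } }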
1761 ee/assist-server/package-lock.json generated Normal file
File diff suppressed because it is too large.
24 ee/assist-server/package.json Normal file
@ -0,0 +1,24 @@
{
  "name": "assist-server",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "@fastify/deepmerge": "^3.0.0",
    "@maxmind/geoip2-node": "^6.0.0",
    "express": "^4.21.2",
    "jsonwebtoken": "^9.0.2",
    "redis": "^4.7.0",
    "socket.io": "^4.8.1",
    "socket.io-client": "^4.8.1",
    "ua-parser-js": "^2.0.3",
    "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.51.0",
    "winston": "^3.17.0"
  }
}
67 ee/assist-server/server.js Normal file
@ -0,0 +1,67 @@
const { App } = require('uWebSockets.js');
const { Server } = require('socket.io');
const { logger } = require("./app/logger");
const { authorizer } = require("./app/assist");
const { onConnect, setSocketIOServer } = require("./app/socket");
const { startCacheRefresher } = require("./app/cache");

const app = App();
const pingInterval = parseInt(process.env.PING_INTERVAL) || 25000;

const getCompressionConfig = function () {
    // WS: The theoretical overhead per socket is 19KB (11KB for compressor and 8KB for decompressor)
    let perMessageDeflate = false;
    if (process.env.COMPRESSION === "true") {
        logger.info(`WS compression: enabled`);
        perMessageDeflate = {
            zlibDeflateOptions: {
                windowBits: 10,
                memLevel: 1
            },
            zlibInflateOptions: {
                windowBits: 10
            }
        }
    } else {
        logger.info(`WS compression: disabled`);
    }
    return {
        perMessageDeflate: perMessageDeflate,
        clientNoContextTakeover: true
    };
}

const io = new Server({
    maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
    pingInterval: pingInterval, // Will use it for cache invalidation
    cors: {
        origin: "*", // Allow connections from any origin (for development)
        methods: ["GET", "POST"],
        credentials: true
    },
    path: '/socket',
    ...getCompressionConfig()
});

io.use(async (socket, next) => await authorizer.check(socket, next));
io.on('connection', (socket) => onConnect(socket));
io.attachApp(app);
io.engine.on("headers", (headers) => {
    headers["x-host-id"] = process.env.HOSTNAME || "unknown";
});
setSocketIOServer(io);

const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = parseInt(process.env.PORT) || 9001;
app.listen(PORT, (token) => {
    if (token) {
        console.log(`Server running at http://${HOST}:${PORT}`);
    } else {
        console.log(`Failed to listen on port ${PORT}`);
    }
});
startCacheRefresher(io);

process.on('uncaughtException', err => {
    logger.error(`Uncaught Exception: ${err}`);
});
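Taken together, server.js serves the Socket.IO endpoint under the '/socket' path (port 9001 by default) and socket.js refuses any connection whose handshake query lacks identity, peerId or sessionInfo. A rough client-side sketch using the bundled socket.io-client dependency could look like the snippet below; the host, the 'session' identity value and all field values are assumptions for illustration, and the real constants plus any extra credentials the authorizer demands live in ./app/assist, which is not shown in this diff.

// Hedged sketch of a client connection; not part of the diff, values are illustrative.
const { io } = require('socket.io-client');

const socket = io('http://localhost:9001', {
    path: '/socket',                      // must match the path configured in server.js
    query: {
        identity: 'session',              // assumed value; actual constants come from IDENTITIES in ./app/assist
        peerId: 'project-1-session-42',   // illustrative peer identifier
        roomId: 'project-1-session-42',   // room the server joins the socket to
        tabId: 'tab-1',
        sessId: '42',
        sessionInfo: JSON.stringify({ pageTitle: 'Checkout' }),
    },
});

socket.on('connect', () => console.log('connected as', socket.id));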
@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {
 
 // For the ender service only
 func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
-	rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
+	rows, err := s.db.Query(`
+		SELECT
+			session_id,
+			CASE
+				WHEN duration IS NULL OR duration < 0 THEN 0
+				ELSE duration
+			END,
+			start_ts
+		FROM sessions
+		WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
 	if err != nil {
 		return nil, err
 	}
Some files were not shown because too many files have changed in this diff.