Compare commits
156 commits
go_pa_char...main
Commits in this range (SHA1):
90510aa33b, 96a70f5d41, d4a13edcf0, 51fad91a22, 36abcda1e1, dd5f464f73, f9ada41272, 9e24a3583e, 0a3129d3cd, 99d61db9d9,
133958622e, fb021f606f, a2905fa8ed, beec2283fd, 6c8b55019e, e3e3e11227, c6f7de04cc, 2921c17cbf, 7eb3f5c4c8, 5a9a8e588a,
4b14258266, 744d2d4311, 64242a5dc0, cae3002697, 3d3c62196b, e810958a5d, 39fa9787d1, c9c1ad4dde, d9868928be, a460d8c9a2,
930417aab4, 07bc184f4d, 71b7cca569, 355d27eaa0, 66b485cccf, de33a42151, f12bdebf82, bbfa20c693, f264ba043d, a05dce8125,
3a1635d81f, ccb332c636, 80ffa15959, b2e961d621, b4d0598f23, e77f083f10, 58da1d3f64, 447fc26a2a, 9bdf6e4f92, 01f403e12d,
39eb943b86, 366b0d38b0, f4d5b3c06e, 93ae18133e, fbe5d78270, b803eed1d4, 9ed3cb1b7e, 5e0e5730ba, d78b33dcd2, 4b1ca200b4,
08d930f9ff, da37809bc8, d922fc7ad5, 796360fdd2, 13dbb60d8b, 9e20a49128, 91f8cc1399, f8ba3f6d89, 85e30b3692, 0360e3726e,
77bbb5af36, ab0d4cfb62, 3fd506a812, e8432e2dec, 5c76a8524c, 3ba40a4811, f9a3f24590, 85d6d0abac, b3594136ce, 8f67edde8d,
74ed29915b, 3ca71ec211, 0e469fd056, a8cb0e1643, e171f0d8d5, 68ea291444, 05cbb831c7, 5070ded1f4, 77610a4924, 7c34e4a0f6,
330e21183f, 30ce37896c, 80a7817e7d, 1b9c568cb1, 3759771ae9, f6ae5aba88, 5190dc512a, 3fcccb51e8, 26077d5689, 00c57348fd,
1f9bc5520a, aef94618f6, 2a330318c7, 6777d5ce2a, 8a6f8fe91f, 7b078fed4c, 894d4c84b3, 46390a3ba9, 621667f5ce, a72f476f1c,
623946ce4e, 2d099214fc, b0e7054f89, a9097270af, 5d514ddaf2, 43688bb03b, e050cee7bb, 6b35df7125, 8e099b6dc3, c0a4734054,
7de1efb5fe, d4ff28ddbe, b2256f72d0, a63bda1c79, 3a0176789e, f2b7271fca, d50f89662b, 35051d201c, 214be95ecc, dbc142c114,
443f5e8f08, 9f693f220d, 5ab30380b0, fc86555644, 2a3c611a27, 1d6fb0ae9e, bef91a6136, 1e2bd19d32, 3b58cb347e, ca4590501a,
fd12cc7585, 6abded53e0, 82c5e5e59d, c77b0cc4de, de344e62ef, deb78a62c0, 0724cf05f0, cc704f1bc3, 4c159b2d26, 42df33bc01,
ae95b48760, 4be3050e61, 8eec6e983b, 5fec615044, f77568a01c, 618e4dc59f
512 changed files with 10521 additions and 31465 deletions
@@ -47,7 +47,6 @@ runs:
"JWT_SECRET:.global.jwtSecret"
"JWT_SPOT_REFRESH_SECRET:.chalice.env.JWT_SPOT_REFRESH_SECRET"
"JWT_SPOT_SECRET:.global.jwtSpotSecret"
"JWT_SECRET:.global.tokenSecret"
"LICENSE_KEY:.global.enterpriseEditionLicense"
"MINIO_ACCESS_KEY:.global.s3.accessKey"
"MINIO_SECRET_KEY:.global.s3.secretKey"
33  .github/workflows/frontend-tests.yaml (vendored)

@@ -1,33 +0,0 @@
name: Frontend tests

on:
  pull_request:
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-test.yaml'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install dependencies
        working-directory: frontend
        run: yarn

      - name: Run tests
        working-directory: frontend
        run: yarn test:ci

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          directory: frontend/coverage/
243  .github/workflows/patch-build.yaml (vendored)

@@ -2,7 +2,6 @@
on:
  workflow_dispatch:
    description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
    inputs:
      services:
        description: 'Comma separated names of services to build(in small letters).'
@@ -20,12 +19,20 @@ jobs:
      DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v4
        with:
          fetch-depth: 1
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Rebase with main branch, to make sure the code has latest main changes
        if: github.ref != 'refs/heads/main'
        run: |
          git pull --rebase origin main
          git remote -v
          git config --global user.email "action@github.com"
          git config --global user.name "GitHub Action"
          git config --global rebase.autoStash true
          git fetch origin main:main
          git rebase main
          git log -3

      - name: Downloading yq
        run: |
@@ -48,6 +55,8 @@ jobs:
          aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}

      - uses: depot/setup-action@v1
        env:
          DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
      - name: Get HEAD Commit ID
        run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
      - name: Define Branch Name
@ -65,78 +74,168 @@ jobs:
|
|||
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
|
||||
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
|
||||
MSAAS_REPO_FOLDER: /tmp/msaas
|
||||
SERVICES_INPUT: ${{ github.event.inputs.services }}
|
||||
run: |
|
||||
set -exo pipefail
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b $BRANCH_NAME
|
||||
working_dir=$(pwd)
|
||||
function image_version(){
|
||||
local service=$1
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
current_version=$(yq eval '.AppVersion' $chart_path)
|
||||
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
|
||||
echo $new_version
|
||||
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# Configuration
|
||||
readonly WORKING_DIR=$(pwd)
|
||||
readonly BUILD_SCRIPT_NAME="build.sh"
|
||||
readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
|
||||
|
||||
# Initialize git configuration
|
||||
setup_git() {
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
}
|
||||
function clone_msaas() {
|
||||
[ -d $MSAAS_REPO_FOLDER ] || {
|
||||
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
|
||||
git log -1
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
}
|
||||
|
||||
# Get and increment image version
|
||||
image_version() {
|
||||
local service=$1
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
local current_version new_version
|
||||
|
||||
current_version=$(yq eval '.AppVersion' "$chart_path")
|
||||
new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
|
||||
echo "$new_version"
|
||||
}
|
||||
function build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
echo building managed
|
||||
clone_msaas
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/api
|
||||
else
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/$service
|
||||
fi
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
|
||||
|
||||
# Clone MSAAS repository if not exists
|
||||
clone_msaas() {
|
||||
if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
|
||||
git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
cd openreplay && git fetch origin && git checkout main
|
||||
git log -1
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
fi
|
||||
}
|
||||
# Checking for backend images
|
||||
ls backend/cmd >> /tmp/backend.txt
|
||||
echo Services: "${{ github.event.inputs.services }}"
|
||||
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
|
||||
BUILD_SCRIPT_NAME="build.sh"
|
||||
# Build FOSS
|
||||
for SERVICE in "${SERVICES[@]}"; do
|
||||
# Check if service is backend
|
||||
if grep -q $SERVICE /tmp/backend.txt; then
|
||||
cd backend
|
||||
foss_build_args="nil $SERVICE"
|
||||
ee_build_args="ee $SERVICE"
|
||||
else
|
||||
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
|
||||
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
version=$(image_version $SERVICE)
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
else
|
||||
build_managed $SERVICE $version
|
||||
fi
|
||||
cd $working_dir
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
|
||||
yq eval ".AppVersion = \"$version\"" -i $chart_path
|
||||
git add $chart_path
|
||||
git commit -m "Increment $SERVICE chart version"
|
||||
git push --set-upstream origin $BRANCH_NAME
|
||||
done
|
||||
|
||||
# Build managed services
|
||||
build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
|
||||
echo "Building managed service: $service"
|
||||
clone_msaas
|
||||
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/api"
|
||||
else
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/$service"
|
||||
fi
|
||||
|
||||
local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
|
||||
|
||||
echo "Executing: $build_cmd"
|
||||
if ! eval "$build_cmd" 2>&1; then
|
||||
echo "Build failed for $service"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Build service with given arguments
|
||||
build_service() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local build_args=$3
|
||||
local build_script=${4:-$BUILD_SCRIPT_NAME}
|
||||
|
||||
local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
|
||||
echo "Executing: $command"
|
||||
eval "$command"
|
||||
}
|
||||
|
||||
# Update chart version and commit changes
|
||||
update_chart_version() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
|
||||
# Ensure we're in the original working directory/repository
|
||||
cd "$WORKING_DIR"
|
||||
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
|
||||
git add "$chart_path"
|
||||
git commit -m "Increment $service chart version to $version"
|
||||
git push --set-upstream origin "$BRANCH_NAME"
|
||||
cd -
|
||||
}
|
||||
|
||||
# Main execution
|
||||
main() {
|
||||
setup_git
|
||||
|
||||
# Get backend services list
|
||||
ls backend/cmd >"$BACKEND_SERVICES_FILE"
|
||||
|
||||
# Parse services input (fix for GitHub Actions syntax)
|
||||
echo "Services: ${SERVICES_INPUT:-$1}"
|
||||
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
|
||||
|
||||
# Process each service
|
||||
for service in "${services[@]}"; do
|
||||
echo "Processing service: $service"
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
|
||||
|
||||
# Determine build configuration based on service type
|
||||
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
|
||||
# Backend service
|
||||
cd backend
|
||||
foss_build_args="nil $service"
|
||||
ee_build_args="ee $service"
|
||||
else
|
||||
# Non-backend service
|
||||
case "$service" in
|
||||
chalice | alerts | crons)
|
||||
cd "$WORKING_DIR/api"
|
||||
;;
|
||||
*)
|
||||
cd "$service"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Special build scripts for alerts/crons
|
||||
if [[ $service == 'alerts' || $service == 'crons' ]]; then
|
||||
build_script="build_${service}.sh"
|
||||
fi
|
||||
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
|
||||
# Get version and build
|
||||
local version
|
||||
version=$(image_version "$service")
|
||||
|
||||
# Build FOSS and EE versions
|
||||
build_service "$service" "$version" "$foss_build_args"
|
||||
build_service "$service" "${version}-ee" "$ee_build_args"
|
||||
|
||||
# Build managed version for specific services
|
||||
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
|
||||
echo "Nothing to build in managed for service $service"
|
||||
else
|
||||
build_managed "$service" "$version"
|
||||
fi
|
||||
|
||||
# Update chart and commit
|
||||
update_chart_version "$service" "$version"
|
||||
done
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
# Cleanup
|
||||
rm -f "$BACKEND_SERVICES_FILE"
|
||||
}
|
||||
|
||||
echo "Working directory: $WORKING_DIR"
|
||||
# Run main function with all arguments
|
||||
main "$SERVICES_INPUT"
|
||||
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: repo-sync/pull-request@v2
|
||||
|
|
18  .github/workflows/tracker-tests.yaml (vendored)
@@ -22,14 +22,22 @@ jobs:
      - name: Cache tracker modules
        uses: actions/cache@v3
        with:
          path: tracker/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }}
          path: tracker/tracker/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
          restore-keys: |
            test_tracker_build{{ runner.OS }}-build-
            test_tracker_build{{ runner.OS }}-
      - name: Cache tracker-assist modules
        uses: actions/cache@v3
        with:
          path: tracker/tracker-assist/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
          restore-keys: |
            test_tracker_build{{ runner.OS }}-build-
            test_tracker_build{{ runner.OS }}-
      - name: Setup Testing packages
        run: |
          cd tracker
          cd tracker/tracker
          bun install
      - name: Jest tests
        run: |
@@ -39,6 +47,10 @@ jobs:
        run: |
          cd tracker/tracker
          bun run build
      - name: (TA) Setup Testing packages
        run: |
          cd tracker/tracker-assist
          bun install
      - name: (TA) Jest tests
        run: |
          cd tracker/tracker-assist
3  .github/workflows/update-tag.yaml (vendored)
@@ -33,11 +33,10 @@ jobs:
      - name: Set Remote with GITHUB_TOKEN
        run: |
          git config --unset http.https://github.com/.extraheader
          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}

      - name: Push main branch to tag
        run: |
          git fetch --tags
          git checkout main
          echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
          git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
1  .gitignore (vendored)
@@ -7,4 +7,3 @@ node_modules
**/*.envrc
.idea
*.mob*
install-state.gz
@@ -1,7 +1,7 @@
repos:
  - repo: https://github.com/gitguardian/ggshield
    rev: v1.38.0
    rev: v1.14.5
    hooks:
      - id: ggshield
        language_version: python3
        stages: [pre-commit]
        stages: [commit]
22  api/Pipfile
@@ -4,24 +4,26 @@ verify_ssl = true
name = "pypi"

[packages]
urllib3 = "==2.4.0"
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.38.16"
boto3 = "==1.36.12"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["binary", "pool"], version = "==3.2.9"}
clickhouse-connect = "==0.8.17"
elasticsearch = "==9.0.1"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
jira = "==3.8.0"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.2"}
cachetools = "==5.5.1"
fastapi = "==0.115.8"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.11.4"}
pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0"
redis = "==6.1.0"
redis = "==5.2.1"

[dev-packages]

[requires]
python_version = "3.12"
python_full_version = "3.12.8"
@@ -16,7 +16,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client, ch_client
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics

loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")
@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)

app.include_router(product_analytics.public_app, prefix="/pa")
app.include_router(product_analytics.app, prefix="/pa")
app.include_router(product_analytics.app_apikey, prefix="/pa")
app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
@@ -1,11 +0,0 @@
import logging

from decouple import config

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))

if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
    logging.info(">>> Using experimental autocomplete")
    from . import autocomplete_ch as autocomplete
else:
    from . import autocomplete
@@ -1,9 +1,10 @@
import logging

import schemas
from chalicelib.core import countries, metadata
from chalicelib.core import countries, events, metadata
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.event_filter_definition import Event
from chalicelib.utils.or_cache import CachedResponse

logger = logging.getLogger(__name__)
TABLE = "public.autocomplete"
@@ -112,10 +113,10 @@ def __generic_query(typename, value_length=None):
                LIMIT 10;"""


def __generic_autocomplete(event: str):
def __generic_autocomplete(event: Event):
    def f(project_id, value, key=None, source=None):
        with pg_client.PostgresClient() as cur:
            query = __generic_query(event, value_length=len(value))
            query = __generic_query(event.ui_type, value_length=len(value))
            params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
                      "svalue": helper.string_to_sql_like("^" + value)}
            cur.execute(cur.mogrify(query, params))
@ -148,8 +149,8 @@ def __errors_query(source=None, value_length=None):
|
|||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
|
|
@ -160,8 +161,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
|
|
@ -172,8 +173,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(value)s
|
||||
|
|
@ -184,8 +185,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(value)s
|
||||
|
|
@ -195,8 +196,8 @@ def __errors_query(source=None, value_length=None):
|
|||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
|
|
@ -207,8 +208,8 @@ def __errors_query(source=None, value_length=None):
|
|||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{schemas.EventType.ERROR}' AS type
|
||||
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.ERROR.ui_type}' AS type
|
||||
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
|
|
@ -233,8 +234,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
if len(value) > 2:
|
||||
query = f"""(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@ -243,8 +244,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@ -253,8 +254,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@ -263,8 +264,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@ -273,8 +274,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
else:
|
||||
query = f"""(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@ -283,8 +284,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
|
|||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{schemas.EventType.ERROR_MOBILE}' AS type
|
||||
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
|
||||
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
|
|
@@ -376,6 +377,7 @@ def is_top_supported(event_type):
    return TYPE_TO_COLUMN.get(event_type, False)


@CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)
def get_top_values(project_id, event_type, event_key=None):
    with pg_client.PostgresClient() as cur:
        if schemas.FilterType.has_value(event_type):
@@ -1,5 +1,3 @@
import logging

import schemas
from chalicelib.core import metadata
from chalicelib.core.errors import errors_legacy

@@ -9,8 +7,6 @@ from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import helper, metrics_helper
from chalicelib.utils.TimeUTC import TimeUTC

logger = logging.getLogger(__name__)


def _multiple_values(values, value_key="value"):
    query_values = {}
@@ -382,9 +378,9 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
                    ORDER BY timestamp) AS sub_table
                    GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""

        logger.debug("------------")
        logger.debug(ch.format(main_ch_query, params))
        logger.debug("------------")
        # print("------------")
        # print(ch.format(main_ch_query, params))
        # print("------------")
        query = ch.format(query=main_ch_query, parameters=params)

        rows = ch.execute(query=query)
226  api/chalicelib/core/events.py (Normal file)

@@ -0,0 +1,226 @@
|
|||
from functools import cache
|
||||
from typing import Optional
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import issues
|
||||
from chalicelib.core.autocomplete import autocomplete
|
||||
from chalicelib.core.sessions import sessions_metas
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
|
||||
|
||||
|
||||
def get_customs_by_session_id(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
c.*,
|
||||
'CUSTOM' AS type
|
||||
FROM events_common.customs AS c
|
||||
WHERE
|
||||
c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
return helper.dict_to_camel_case(rows)
|
||||
|
||||
|
||||
def __merge_cells(rows, start, count, replacement):
|
||||
rows[start] = replacement
|
||||
rows = rows[:start + 1] + rows[start + count:]
|
||||
return rows
|
||||
|
||||
|
||||
def __get_grouped_clickrage(rows, session_id, project_id):
|
||||
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
|
||||
if len(click_rage_issues) == 0:
|
||||
return rows
|
||||
|
||||
for c in click_rage_issues:
|
||||
merge_count = c.get("payload")
|
||||
if merge_count is not None:
|
||||
merge_count = merge_count.get("Count", 3)
|
||||
else:
|
||||
merge_count = 3
|
||||
for i in range(len(rows)):
|
||||
if rows[i]["timestamp"] == c["timestamp"]:
|
||||
rows = __merge_cells(rows=rows,
|
||||
start=i,
|
||||
count=merge_count,
|
||||
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
|
||||
break
|
||||
return rows
|
||||
|
||||
|
||||
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
rows = []
|
||||
if event_type is None or event_type == schemas.EventType.CLICK:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
c.*,
|
||||
'CLICK' AS type
|
||||
FROM events.clicks AS c
|
||||
WHERE
|
||||
c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows += cur.fetchall()
|
||||
if group_clickrage:
|
||||
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
|
||||
if event_type is None or event_type == schemas.EventType.INPUT:
|
||||
cur.execute(cur.mogrify("""
|
||||
SELECT
|
||||
i.*,
|
||||
'INPUT' AS type
|
||||
FROM events.inputs AS i
|
||||
WHERE
|
||||
i.session_id = %(session_id)s
|
||||
ORDER BY i.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows += cur.fetchall()
|
||||
if event_type is None or event_type == schemas.EventType.LOCATION:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
l.*,
|
||||
l.path AS value,
|
||||
l.path AS url,
|
||||
'LOCATION' AS type
|
||||
FROM events.pages AS l
|
||||
WHERE
|
||||
l.session_id = %(session_id)s
|
||||
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
|
||||
rows += cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
|
||||
return rows
|
||||
|
||||
|
||||
def _search_tags(project_id, value, key=None, source=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = f"""
|
||||
SELECT public.tags.name
|
||||
'TAG' AS type
|
||||
FROM public.tags
|
||||
WHERE public.tags.project_id = %(project_id)s
|
||||
ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
|
||||
LIMIT 10
|
||||
"""
|
||||
query = cur.mogrify(query, {'project_id': project_id, 'value': value})
|
||||
cur.execute(query)
|
||||
results = helper.list_to_camel_case(cur.fetchall())
|
||||
return results
|
||||
|
||||
|
||||
class EventType:
|
||||
CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
|
||||
INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
|
||||
LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
|
||||
CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
|
||||
REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
|
||||
GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
|
||||
STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
|
||||
TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
|
||||
ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
|
||||
column=None) # column=None because errors are searched by name or message
|
||||
METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
|
||||
# MOBILE
|
||||
CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
|
||||
INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
|
||||
VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
|
||||
SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
|
||||
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
|
||||
REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
|
||||
CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
|
||||
column=None) # column=None because errors are searched by name or message
|
||||
|
||||
|
||||
@cache
|
||||
def supported_types():
|
||||
return {
|
||||
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
|
||||
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
|
||||
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
|
||||
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
|
||||
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.LOCATION.ui_type)),
|
||||
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.CUSTOM.ui_type)),
|
||||
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.REQUEST.ui_type)),
|
||||
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.GRAPHQL.ui_type)),
|
||||
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.STATEACTION.ui_type)),
|
||||
EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None),
|
||||
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
|
||||
query=None),
|
||||
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
|
||||
query=None),
|
||||
# MOBILE
|
||||
EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.CLICK_MOBILE.ui_type)),
|
||||
EventType.SWIPE_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.SWIPE_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.SWIPE_MOBILE.ui_type)),
|
||||
EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.INPUT_MOBILE.ui_type)),
|
||||
EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.VIEW_MOBILE.ui_type)),
|
||||
EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.CUSTOM_MOBILE.ui_type)),
|
||||
EventType.REQUEST_MOBILE.ui_type: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=EventType.REQUEST_MOBILE.ui_type)),
|
||||
EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile,
|
||||
query=None),
|
||||
}
|
||||
|
||||
|
||||
def get_errors_by_session_id(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(f"""\
|
||||
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
|
||||
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
|
||||
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
|
||||
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
|
||||
errors = cur.fetchall()
|
||||
for e in errors:
|
||||
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
|
||||
return helper.list_to_camel_case(errors)
|
||||
|
||||
|
||||
def search(text, event_type, project_id, source, key):
|
||||
if not event_type:
|
||||
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
|
||||
|
||||
if event_type in supported_types().keys():
|
||||
rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
|
||||
elif event_type + "_MOBILE" in supported_types().keys():
|
||||
rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
|
||||
elif event_type in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
elif event_type.endswith("_IOS") \
|
||||
and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
elif event_type.endswith("_MOBILE") \
|
||||
and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
else:
|
||||
return {"errors": ["unsupported event"]}
|
||||
|
||||
return {"data": rows}
|
||||
|
|
@@ -1,11 +0,0 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_EVENTS", cast=bool, default=False):
    logger.info(">>> Using experimental events replay")
    from . import events_ch as events
else:
    from . import events_pg as events
|
@ -1,97 +0,0 @@
|
|||
from chalicelib.utils import ch_client
|
||||
from .events_pg import *
|
||||
|
||||
|
||||
def __explode_properties(rows):
|
||||
for i in range(len(rows)):
|
||||
rows[i] = {**rows[i], **rows[i]["$properties"]}
|
||||
rows[i].pop("$properties")
|
||||
return rows
|
||||
|
||||
|
||||
def get_customs_by_session_id(session_id, project_id):
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
rows = cur.execute(""" \
|
||||
SELECT `$properties`,
|
||||
created_at,
|
||||
'CUSTOM' AS type
|
||||
FROM product_analytics.events
|
||||
WHERE session_id = %(session_id)s
|
||||
AND NOT `$auto_captured`
|
||||
AND `$event_name`!='INCIDENT'
|
||||
ORDER BY created_at;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
rows = __explode_properties(rows)
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def __merge_cells(rows, start, count, replacement):
|
||||
rows[start] = replacement
|
||||
rows = rows[:start + 1] + rows[start + count:]
|
||||
return rows
|
||||
|
||||
|
||||
def __get_grouped_clickrage(rows, session_id, project_id):
|
||||
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
|
||||
if len(click_rage_issues) == 0:
|
||||
return rows
|
||||
|
||||
for c in click_rage_issues:
|
||||
merge_count = c.get("payload")
|
||||
if merge_count is not None:
|
||||
merge_count = merge_count.get("Count", 3)
|
||||
else:
|
||||
merge_count = 3
|
||||
for i in range(len(rows)):
|
||||
if rows[i]["created_at"] == c["createdAt"]:
|
||||
rows = __merge_cells(rows=rows,
|
||||
start=i,
|
||||
count=merge_count,
|
||||
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
|
||||
break
|
||||
return rows
|
||||
|
||||
|
||||
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
select_events = ('CLICK', 'INPUT', 'LOCATION')
|
||||
if event_type is not None:
|
||||
select_events = (event_type,)
|
||||
query = cur.format(query=""" \
|
||||
SELECT created_at,
|
||||
`$properties`,
|
||||
`$event_name` AS type
|
||||
FROM product_analytics.events
|
||||
WHERE session_id = %(session_id)s
|
||||
AND `$event_name` IN %(select_events)s
|
||||
AND `$auto_captured`
|
||||
ORDER BY created_at;""",
|
||||
parameters={"project_id": project_id, "session_id": session_id,
|
||||
"select_events": select_events})
|
||||
rows = cur.execute(query)
|
||||
rows = __explode_properties(rows)
|
||||
if group_clickrage and 'CLICK' in select_events:
|
||||
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
|
||||
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
rows = sorted(rows, key=lambda k: k["createdAt"])
|
||||
return rows
|
||||
|
||||
|
||||
def get_incidents_by_session_id(session_id, project_id):
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
query = cur.format(query=""" \
|
||||
SELECT created_at,
|
||||
`$properties`,
|
||||
`$event_name` AS type
|
||||
FROM product_analytics.events
|
||||
WHERE session_id = %(session_id)s
|
||||
AND `$event_name` = 'INCIDENT'
|
||||
AND `$auto_captured`
|
||||
ORDER BY created_at;""",
|
||||
parameters={"project_id": project_id, "session_id": session_id})
|
||||
rows = cur.execute(query)
|
||||
rows = __explode_properties(rows)
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
rows = sorted(rows, key=lambda k: k["createdAt"])
|
||||
return rows
|
||||
|
|
@ -1,209 +0,0 @@
|
|||
import logging
|
||||
from functools import cache
|
||||
from typing import Optional
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.autocomplete import autocomplete
|
||||
from chalicelib.core.issues import issues
|
||||
from chalicelib.core.sessions import sessions_metas
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.event_filter_definition import SupportedFilter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_customs_by_session_id(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(""" \
|
||||
SELECT c.*,
|
||||
'CUSTOM' AS type
|
||||
FROM events_common.customs AS c
|
||||
WHERE c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def __merge_cells(rows, start, count, replacement):
|
||||
rows[start] = replacement
|
||||
rows = rows[:start + 1] + rows[start + count:]
|
||||
return rows
|
||||
|
||||
|
||||
def __get_grouped_clickrage(rows, session_id, project_id):
|
||||
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
|
||||
if len(click_rage_issues) == 0:
|
||||
return rows
|
||||
|
||||
for c in click_rage_issues:
|
||||
merge_count = c.get("payload")
|
||||
if merge_count is not None:
|
||||
merge_count = merge_count.get("Count", 3)
|
||||
else:
|
||||
merge_count = 3
|
||||
for i in range(len(rows)):
|
||||
if rows[i]["timestamp"] == c["timestamp"]:
|
||||
rows = __merge_cells(rows=rows,
|
||||
start=i,
|
||||
count=merge_count,
|
||||
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
|
||||
break
|
||||
return rows
|
||||
|
||||
|
||||
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
rows = []
|
||||
if event_type is None or event_type == schemas.EventType.CLICK:
|
||||
cur.execute(cur.mogrify(""" \
|
||||
SELECT c.*,
|
||||
'CLICK' AS type
|
||||
FROM events.clicks AS c
|
||||
WHERE c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows += cur.fetchall()
|
||||
if group_clickrage:
|
||||
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
|
||||
if event_type is None or event_type == schemas.EventType.INPUT:
|
||||
cur.execute(cur.mogrify("""
|
||||
SELECT i.*,
|
||||
'INPUT' AS type
|
||||
FROM events.inputs AS i
|
||||
WHERE i.session_id = %(session_id)s
|
||||
ORDER BY i.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows += cur.fetchall()
|
||||
if event_type is None or event_type == schemas.EventType.LOCATION:
|
||||
cur.execute(cur.mogrify(""" \
|
||||
SELECT l.*,
|
||||
l.path AS value,
|
||||
l.path AS url,
|
||||
'LOCATION' AS type
|
||||
FROM events.pages AS l
|
||||
WHERE
|
||||
l.session_id = %(session_id)s
|
||||
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
|
||||
rows += cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
|
||||
return rows
|
||||
|
||||
|
||||
def _search_tags(project_id, value, key=None, source=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = f"""
|
||||
SELECT public.tags.name
|
||||
'TAG' AS type
|
||||
FROM public.tags
|
||||
WHERE public.tags.project_id = %(project_id)s
|
||||
ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
|
||||
LIMIT 10
|
||||
"""
|
||||
query = cur.mogrify(query, {'project_id': project_id, 'value': value})
|
||||
cur.execute(query)
|
||||
results = helper.list_to_camel_case(cur.fetchall())
|
||||
return results
|
||||
|
||||
|
||||
@cache
|
||||
def supported_types():
|
||||
return {
|
||||
schemas.EventType.CLICK: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK),
|
||||
query=autocomplete.__generic_query(typename=schemas.EventType.CLICK)),
|
||||
schemas.EventType.INPUT: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT),
|
||||
query=autocomplete.__generic_query(typename=schemas.EventType.INPUT)),
|
||||
schemas.EventType.LOCATION: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.LOCATION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.LOCATION)),
|
||||
schemas.EventType.CUSTOM: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.CUSTOM)),
|
||||
schemas.EventType.REQUEST: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.REQUEST)),
|
||||
schemas.EventType.GRAPHQL: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.GRAPHQL),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.GRAPHQL)),
|
||||
schemas.EventType.STATE_ACTION: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.STATEACTION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.STATE_ACTION)),
|
||||
schemas.EventType.TAG: SupportedFilter(get=_search_tags, query=None),
|
||||
schemas.EventType.ERROR: SupportedFilter(get=autocomplete.__search_errors,
|
||||
query=None),
|
||||
schemas.FilterType.METADATA: SupportedFilter(get=autocomplete.__search_metadata,
|
||||
query=None),
|
||||
# MOBILE
|
||||
schemas.EventType.CLICK_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.CLICK_MOBILE)),
|
||||
schemas.EventType.SWIPE_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.SWIPE_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.SWIPE_MOBILE)),
|
||||
schemas.EventType.INPUT_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.INPUT_MOBILE)),
|
||||
schemas.EventType.VIEW_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.VIEW_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.VIEW_MOBILE)),
|
||||
schemas.EventType.CUSTOM_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.CUSTOM_MOBILE)),
|
||||
schemas.EventType.REQUEST_MOBILE: SupportedFilter(
|
||||
get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST_MOBILE),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=schemas.EventType.REQUEST_MOBILE)),
|
||||
schemas.EventType.ERROR_MOBILE: SupportedFilter(get=autocomplete.__search_errors_mobile,
|
||||
query=None),
|
||||
}
|
||||
|
||||
|
||||
def get_errors_by_session_id(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(f"""\
|
||||
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
|
||||
FROM events.errors AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
|
||||
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
|
||||
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
|
||||
errors = cur.fetchall()
|
||||
for e in errors:
|
||||
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
|
||||
return helper.list_to_camel_case(errors)
|
||||
|
||||
|
||||
def get_incidents_by_session_id(session_id, project_id):
|
||||
logger.warning("INCIDENTS not supported in PG")
|
||||
return []
|
||||
|
||||
|
||||
def search(text, event_type, project_id, source, key):
|
||||
if not event_type:
|
||||
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
|
||||
|
||||
if event_type in supported_types().keys():
|
||||
rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
|
||||
elif event_type + "_MOBILE" in supported_types().keys():
|
||||
rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
|
||||
elif event_type in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
elif event_type.endswith("_IOS") \
|
||||
and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
elif event_type.endswith("_MOBILE") \
|
||||
and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
else:
|
||||
return {"errors": ["unsupported event"]}
|
||||
|
||||
return {"data": rows}
|
||||
|
|
@@ -1,5 +1,5 @@
from chalicelib.utils import pg_client, helper
from . import events
from chalicelib.core import events


def get_customs_by_session_id(session_id, project_id):

@@ -58,7 +58,7 @@ def get_crashes_by_session_id(session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""
                    SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
                    FROM events_common.crashes AS cr
                    FROM {events.EventType.CRASH_MOBILE.table} AS cr
                    INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
                    INNER JOIN public.sessions AS s USING (session_id)
                    WHERE
@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT username, token, url
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id = %(user_id)s;""",
|
||||
{"user_id": self._user_id})
|
||||
)
|
||||
data = helper.dict_to_camel_case(cur.fetchone())
|
||||
|
|
@@ -95,10 +95,9 @@ class JIRAIntegration(base.BaseIntegration):
    def add(self, username, token, url, obfuscate=False):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                        INSERT INTO public.jira_cloud(username, token, user_id,url)
                        VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
                        RETURNING username, token, url;""",
                cur.mogrify(""" \
                        INSERT INTO public.jira_cloud(username, token, user_id, url)
                        VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
                            {"user_id": self._user_id, "username": username,
                             "token": token, "url": url})
            )
@@ -112,9 +111,10 @@ class JIRAIntegration(base.BaseIntegration):
    def delete(self):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                        DELETE FROM public.jira_cloud
                        WHERE user_id=%(user_id)s;""",
                cur.mogrify(""" \
                        DELETE
                        FROM public.jira_cloud
                        WHERE user_id = %(user_id)s;""",
                            {"user_id": self._user_id})
            )
            return {"state": "success"}
@@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
            changes={
                "username": data.username,
                "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
                    else self.integration.token,
                    else self.integration["token"],
                "url": str(data.url)
            },
            obfuscate=True
@@ -4,11 +4,12 @@ from chalicelib.utils import pg_client, helper
def get(project_id, issue_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """ \
            SELECT *
            """\
            SELECT
                *
            FROM public.issues
            WHERE project_id = %(project_id)s
              AND issue_id = %(issue_id)s;""",
                AND issue_id = %(issue_id)s;""",
            {"project_id": project_id, "issue_id": issue_id}
        )
        cur.execute(query=query)
@@ -34,29 +35,6 @@ def get_by_session_id(session_id, project_id, issue_type=None):
        return helper.list_to_camel_case(cur.fetchall())


# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
        else:
            i += 1
            break

        if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
            issues_list.pop(j)
        else:
            i += 1

    return issues_list


def get_all_types():
    return [
        {
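For illustration only, not part of the changeset above: a minimal sketch of what reduce_issues does to a session's issue list, assuming millisecond timestamps and hypothetical issue types.

# Illustrative only — hypothetical issues, not part of the diff.
issues = [
    {"type": "click_rage", "timestamp": 1000},
    {"type": "click_rage", "timestamp": 2500},  # same type, 1.5 s after the first one -> collapsed
    {"type": "dead_click", "timestamp": 9000},
]
print(reduce_issues(issues))
# [{'type': 'click_rage', 'timestamp': 1000}, {'type': 'dead_click', 'timestamp': 9000}]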
@@ -1,11 +0,0 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_EVENTS", cast=bool, default=False):
    logger.info(">>> Using experimental issues")
    from . import issues_ch as issues
else:
    from . import issues_pg as issues
@@ -1,56 +0,0 @@
from chalicelib.utils import ch_client, helper
import datetime
from .issues_pg import get_all_types


def get(project_id, issue_id):
    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=""" \
            SELECT *
            FROM product_analytics.events
            WHERE project_id = %(project_id)s
              AND issue_id = %(issue_id)s;""",
                           parameters={"project_id": project_id, "issue_id": issue_id})
        data = cur.execute(query=query)
        if data is not None and len(data) > 0:
            data = data[0]
            data["title"] = helper.get_issue_title(data["type"])
        return helper.dict_to_camel_case(data)


def get_by_session_id(session_id, project_id, issue_type=None):
    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=f"""\
            SELECT *
            FROM product_analytics.events
            WHERE session_id = %(session_id)s
              AND project_id= %(project_id)s
              AND `$event_name`='ISSUE'
              {"AND issue_type = %(type)s" if issue_type is not None else ""}
            ORDER BY created_at;""",
                           parameters={"session_id": session_id, "project_id": project_id, "type": issue_type})
        data = cur.execute(query)
        return helper.list_to_camel_case(data)


# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["issueType"] == issues_list[j]["issueType"]:
                break
        else:
            i += 1
            break

        if issues_list[i]["createdAt"] - issues_list[j]["createdAt"] < datetime.timedelta(seconds=2):
            issues_list.pop(j)
        else:
            i += 1

    return issues_list
@@ -241,25 +241,3 @@ def get_colname_by_key(project_id, key):
        return None

    return index_to_colname(meta_keys[key])


def get_for_filters(project_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT {",".join(column_names())}
                                FROM public.projects
                                WHERE project_id = %(project_id)s
                                  AND deleted_at ISNULL
                                LIMIT 1;""", {"project_id": project_id})
        cur.execute(query=query)
        metas = cur.fetchone()
        results = []
        if metas is not None:
            for i, k in enumerate(metas.keys()):
                if metas[k] is not None:
                    results.append({"id": f"meta_{i}",
                                    "name": k,
                                    "displayName": metas[k],
                                    "possibleTypes": ["String"],
                                    "autoCaptured": False,
                                    "icon": None})
        return {"total": len(results), "list": results}
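For illustration only, not part of the changeset: the approximate shape get_for_filters returned before its removal, assuming a project whose first two metadata columns are named metadata_1 and metadata_2 and carry display names.

# Illustrative only — hypothetical metadata configuration.
# get_for_filters(project_id=1) ->
{
    "total": 2,
    "list": [
        {"id": "meta_0", "name": "metadata_1", "displayName": "plan",
         "possibleTypes": ["String"], "autoCaptured": False, "icon": None},
        {"id": "meta_1", "name": "metadata_2", "displayName": "tenant",
         "possibleTypes": ["String"], "autoCaptured": False, "icon": None},
    ],
}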
@@ -4,7 +4,7 @@ import logging
from fastapi import HTTPException, status

import schemas
from chalicelib.core.issues import issues
from chalicelib.core import issues
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps, product_analytics, funnels
from chalicelib.core.sessions import sessions, sessions_search
@@ -61,9 +61,6 @@ def get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.C
        return None
    data.series[0].filter.filters += data.series[0].filter.events
    data.series[0].filter.events = []
    print(">>>>>>>>>>>>>>>>>>>>>>>>><")
    print(data.series[0].filter.model_dump())
    print(">>>>>>>>>>>>>>>>>>>>>>>>><")
    return heatmaps.search_short_session(project_id=project.project_id, user_id=user_id,
                                         data=schemas.HeatMapSessionsSearch(
                                             **data.series[0].filter.model_dump()),
@ -244,13 +241,14 @@ def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSche
|
|||
params["card_info"] = json.dumps(params["card_info"])
|
||||
|
||||
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
|
||||
view_type, metric_type, metric_of, metric_value,
|
||||
metric_format, default_config, thumbnail, data,
|
||||
card_info)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
|
||||
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
|
||||
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
|
||||
%(card_info)s) RETURNING metric_id"""
|
||||
view_type, metric_type, metric_of, metric_value,
|
||||
metric_format, default_config, thumbnail, data,
|
||||
card_info)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
|
||||
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
|
||||
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
|
||||
%(card_info)s)
|
||||
RETURNING metric_id"""
|
||||
if len(data.series) > 0:
|
||||
query = f"""WITH m AS ({query})
|
||||
INSERT INTO metric_series(metric_id, index, name, filter)
|
||||
|
|
@ -527,13 +525,13 @@ def get_all(project_id, user_id):
|
|||
def delete_card(project_id, metric_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" \
|
||||
UPDATE public.metrics
|
||||
SET deleted_at = timezone('utc'::text, now()),
|
||||
edited_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public) RETURNING data;""",
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
RETURNING data;""",
|
||||
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
|
||||
)
|
||||
|
||||
|
|
@ -617,14 +615,13 @@ def get_series_for_alert(project_id, user_id):
|
|||
FALSE AS predefined,
|
||||
metric_id,
|
||||
series_id
|
||||
FROM metric_series
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
WHERE metrics.deleted_at ISNULL
|
||||
AND metrics.project_id = %(project_id)s
|
||||
AND metrics.metric_type = 'timeseries'
|
||||
AND (user_id = %(user_id)s
|
||||
OR is_public)
|
||||
ORDER BY name;""",
|
||||
FROM metric_series
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
WHERE metrics.deleted_at ISNULL
|
||||
AND metrics.project_id = %(project_id)s
|
||||
AND metrics.metric_type = 'timeseries'
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
ORDER BY name;""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
|
|
@ -635,11 +632,11 @@ def get_series_for_alert(project_id, user_id):
|
|||
def change_state(project_id, metric_id, user_id, status):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" \
|
||||
UPDATE public.metrics
|
||||
SET active = %(status)s
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET active = %(status)s
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"metric_id": metric_id, "status": status, "user_id": user_id})
|
||||
)
|
||||
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||
|
|
@ -677,8 +674,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
|
|||
"issue": issue}
|
||||
|
||||
|
||||
def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id,
|
||||
data: schemas.CardSessionsSchema, for_dashboard: bool = False):
|
||||
def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema):
|
||||
raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True)
|
||||
|
||||
if raw_metric is None:
|
||||
|
|
@ -697,8 +693,7 @@ def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id,
|
|||
return heatmaps.search_short_session(project_id=project.project_id,
|
||||
data=schemas.HeatMapSessionsSearch(**metric.model_dump()),
|
||||
user_id=user_id)
|
||||
elif metric.metric_type == schemas.MetricType.PATH_ANALYSIS and for_dashboard:
|
||||
metric.hide_excess = True
|
||||
|
||||
return get_chart(project=project, data=metric, user_id=user_id)
@ -6,7 +6,7 @@ from chalicelib.utils import helper
|
|||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
|
||||
def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
|
||||
def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
|
||||
ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
|
||||
schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
|
||||
schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
|
||||
|
|
@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
|
|||
|
||||
|
||||
def __parse_events(f_events: List[dict]):
|
||||
return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events]
|
||||
return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
|
||||
|
||||
|
||||
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]):
|
||||
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
|
||||
if f_events is None:
|
||||
return
|
||||
events = []
|
||||
|
|
|
|||
|
|
@ -160,7 +160,7 @@ s.start_ts,
|
|||
s.duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
|
|
@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
|
|||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
@ -3,7 +3,7 @@ import logging
|
|||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.events import events
|
||||
from chalicelib.core import events
|
||||
from chalicelib.core.metrics.modules import sessions, sessions_mobs
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
|
|
@ -24,9 +24,8 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
|
|||
"main_events.`$event_name` = 'CLICK'",
|
||||
"isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
|
||||
]
|
||||
if data.operator == schemas.SearchEventOperator.PATTERN:
|
||||
constraints.append("match(main_events.`$properties`.url_path'.:String,%(url)s)")
|
||||
elif data.operator == schemas.SearchEventOperator.IS:
|
||||
|
||||
if data.operator == schemas.SearchEventOperator.IS:
|
||||
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
|
||||
else:
|
||||
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")
|
||||
|
|
@ -180,7 +179,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
|
|||
s.duration AS duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
|
|
@ -263,13 +262,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
|
|||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
@ -7,8 +7,7 @@ from typing import List
|
|||
from psycopg2.extras import RealDictRow
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core.events import events
|
||||
from chalicelib.core import events, metadata
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
|
|
@ -77,10 +76,10 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
|
|||
values["maxDuration"] = f.value[1]
|
||||
elif filter_type == schemas.FilterType.REFERRER:
|
||||
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
|
||||
filter_extra_from = [f"INNER JOIN {"events.pages"} AS p USING(session_id)"]
|
||||
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.METADATA:
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
|
|
@ -122,31 +121,31 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
|
|||
op = sh.get_sql_operator(s.operator)
|
||||
# event_type = s["type"].upper()
|
||||
event_type = s.type
|
||||
if event_type == schemas.EventType.CLICK:
|
||||
next_table = "events.clicks"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT:
|
||||
next_table = "events.inputs"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.LOCATION:
|
||||
next_table = "events.pages"
|
||||
next_col_name = "path"
|
||||
elif event_type == schemas.EventType.CUSTOM:
|
||||
next_table = "events_common.customs"
|
||||
next_col_name = "name"
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
next_table = events.EventType.CLICK.table
|
||||
next_col_name = events.EventType.CLICK.column
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
next_table = events.EventType.INPUT.table
|
||||
next_col_name = events.EventType.INPUT.column
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
next_table = events.EventType.LOCATION.table
|
||||
next_col_name = events.EventType.LOCATION.column
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
next_table = events.EventType.CUSTOM.table
|
||||
next_col_name = events.EventType.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == schemas.EventType.CLICK_MOBILE:
|
||||
next_table = "events_ios.taps"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT_MOBILE:
|
||||
next_table = "events_ios.inputs"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.VIEW_MOBILE:
|
||||
next_table = "events_ios.views"
|
||||
next_col_name = "name"
|
||||
elif event_type == schemas.EventType.CUSTOM_MOBILE:
|
||||
next_table = "events_common.customs"
|
||||
next_col_name = "name"
|
||||
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
|
||||
next_table = events.EventType.CLICK_MOBILE.table
|
||||
next_col_name = events.EventType.CLICK_MOBILE.column
|
||||
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
|
||||
next_table = events.EventType.INPUT_MOBILE.table
|
||||
next_col_name = events.EventType.INPUT_MOBILE.column
|
||||
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
|
||||
next_table = events.EventType.VIEW_MOBILE.table
|
||||
next_col_name = events.EventType.VIEW_MOBILE.column
|
||||
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
|
||||
next_table = events.EventType.CUSTOM_MOBILE.table
|
||||
next_col_name = events.EventType.CUSTOM_MOBILE.column
|
||||
else:
|
||||
logger.warning(f"=================UNDEFINED:{event_type}")
|
||||
continue
|
||||
|
|
@ -242,7 +241,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
:return:
|
||||
"""
|
||||
|
||||
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
|
||||
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
|
||||
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
|
||||
|
||||
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
|
||||
|
|
@ -298,10 +297,10 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
values["maxDuration"] = f.value[1]
|
||||
elif filter_type == schemas.FilterType.REFERRER:
|
||||
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
|
||||
filter_extra_from = [f"INNER JOIN {"events.pages"} AS p USING(session_id)"]
|
||||
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.METADATA:
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project.project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
|
|
@ -343,31 +342,31 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
op = sh.get_sql_operator(s.operator)
|
||||
# event_type = s["type"].upper()
|
||||
event_type = s.type
|
||||
if event_type == schemas.EventType.CLICK:
|
||||
next_table = "events.clicks"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT:
|
||||
next_table = "events.inputs"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.LOCATION:
|
||||
next_table = "events.pages"
|
||||
next_col_name = "path"
|
||||
elif event_type == schemas.EventType.CUSTOM:
|
||||
next_table = "events_common.customs"
|
||||
next_col_name = "name"
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
next_table = events.EventType.CLICK.table
|
||||
next_col_name = events.EventType.CLICK.column
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
next_table = events.EventType.INPUT.table
|
||||
next_col_name = events.EventType.INPUT.column
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
next_table = events.EventType.LOCATION.table
|
||||
next_col_name = events.EventType.LOCATION.column
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
next_table = events.EventType.CUSTOM.table
|
||||
next_col_name = events.EventType.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == schemas.EventType.CLICK_MOBILE:
|
||||
next_table = "events_ios.taps"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT_MOBILE:
|
||||
next_table = "events_ios.inputs"
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.VIEW_MOBILE:
|
||||
next_table = "events_ios.views"
|
||||
next_col_name = "name"
|
||||
elif event_type == schemas.EventType.CUSTOM_MOBILE:
|
||||
next_table = "events_common.customs"
|
||||
next_col_name = "name"
|
||||
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
|
||||
next_table = events.EventType.CLICK_MOBILE.table
|
||||
next_col_name = events.EventType.CLICK_MOBILE.column
|
||||
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
|
||||
next_table = events.EventType.INPUT_MOBILE.table
|
||||
next_col_name = events.EventType.INPUT_MOBILE.column
|
||||
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
|
||||
next_table = events.EventType.VIEW_MOBILE.table
|
||||
next_col_name = events.EventType.VIEW_MOBILE.column
|
||||
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
|
||||
next_table = events.EventType.CUSTOM_MOBILE.table
|
||||
next_col_name = events.EventType.CUSTOM_MOBILE.column
|
||||
else:
|
||||
logger.warning(f"=================UNDEFINED:{event_type}")
|
||||
continue
@ -8,14 +8,14 @@ from chalicelib.utils import ch_client
|
|||
from chalicelib.utils import exp_ch_helper
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.core.events import events
|
||||
from chalicelib.core import events
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
|
||||
metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
|
||||
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
|
||||
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
|
||||
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
|
||||
platform = project.platform
|
||||
constraints = ["e.project_id = %(project_id)s",
|
||||
|
|
@ -82,7 +82,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
elif filter_type == schemas.FilterType.REFERRER:
|
||||
constraints.append(
|
||||
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.METADATA:
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project.project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
|
|
@ -125,29 +125,29 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
|
|||
e_k = f"e_value{i}"
|
||||
event_type = s.type
|
||||
next_event_type = exp_ch_helper.get_event_type(event_type, platform=platform)
|
||||
if event_type == schemas.EventType.CLICK:
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
if platform == "web":
|
||||
next_col_name = "label"
|
||||
next_col_name = events.EventType.CLICK.column
|
||||
if not is_any:
|
||||
if schemas.ClickEventExtraOperator.has_value(s.operator):
|
||||
specific_condition = sh.multi_conditions(f"selector {op} %({e_k})s", s.value, value_key=e_k)
|
||||
else:
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT:
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.LOCATION:
|
||||
next_col_name = events.EventType.CLICK_MOBILE.column
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
next_col_name = events.EventType.INPUT.column
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
next_col_name = 'url_path'
|
||||
elif event_type == schemas.EventType.CUSTOM:
|
||||
next_col_name = "name"
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
next_col_name = events.EventType.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == schemas.EventType.CLICK_MOBILE:
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.INPUT_MOBILE:
|
||||
next_col_name = "label"
|
||||
elif event_type == schemas.EventType.VIEW_MOBILE:
|
||||
next_col_name = "name"
|
||||
elif event_type == schemas.EventType.CUSTOM_MOBILE:
|
||||
next_col_name = "name"
|
||||
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
|
||||
next_col_name = events.EventType.CLICK_MOBILE.column
|
||||
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
|
||||
next_col_name = events.EventType.INPUT_MOBILE.column
|
||||
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
|
||||
next_col_name = events.EventType.VIEW_MOBILE.column
|
||||
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
|
||||
next_col_name = events.EventType.CUSTOM_MOBILE.column
|
||||
else:
|
||||
logger.warning(f"=================UNDEFINED:{event_type}")
|
||||
continue
14  api/chalicelib/core/metrics/product_anaytics2.py  (Normal file)
@@ -0,0 +1,14 @@
from chalicelib.utils.ch_client import ClickHouseClient


def search_events(project_id: int, data: dict):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT *
               FROM taha.events
               WHERE project_id=%(project_id)s
               ORDER BY created_at;""",
            params={"project_id": project_id})
        x = ch_client.execute(r)

        return x
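For illustration only, not part of the new file: a hypothetical call to the helper above; the project id and the column names read from the result rows are assumptions.

# Illustrative only — hypothetical values.
rows = search_events(project_id=1, data={})  # `data` is accepted but not used by the query yet
for row in rows:
    print(row.get("created_at"), row.get("$event_name"))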
@ -1,59 +0,0 @@
|
|||
from typing import Optional
|
||||
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
|
||||
def search_events(project_id: int, q: Optional[str] = None):
|
||||
with ClickHouseClient() as ch_client:
|
||||
full_args = {"project_id": project_id, "limit": 20}
|
||||
|
||||
constraints = ["project_id = %(project_id)s",
|
||||
"_timestamp >= now()-INTERVAL 1 MONTH"]
|
||||
if q:
|
||||
constraints += ["value ILIKE %(q)s"]
|
||||
full_args["q"] = helper.string_to_sql_like(q)
|
||||
query = ch_client.format(
|
||||
f"""SELECT value,data_count
|
||||
FROM product_analytics.autocomplete_events_grouped
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY data_count DESC
|
||||
LIMIT %(limit)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
|
||||
return {"values": helper.list_to_camel_case(rows), "_src": 2}
|
||||
|
||||
|
||||
def search_properties(project_id: int, property_name: Optional[str] = None, event_name: Optional[str] = None,
|
||||
q: Optional[str] = None):
|
||||
with ClickHouseClient() as ch_client:
|
||||
select = "value"
|
||||
full_args = {"project_id": project_id, "limit": 20,
|
||||
"event_name": event_name, "property_name": property_name, "q": q,
|
||||
"property_name_l": helper.string_to_sql_like(property_name),
|
||||
"q_l": helper.string_to_sql_like(q)}
|
||||
|
||||
constraints = ["project_id = %(project_id)s",
|
||||
"_timestamp >= now()-INTERVAL 1 MONTH"]
|
||||
if event_name:
|
||||
constraints += ["event_name = %(event_name)s"]
|
||||
|
||||
if property_name and q:
|
||||
constraints += ["property_name = %(property_name)s"]
|
||||
elif property_name:
|
||||
select = "DISTINCT ON(property_name) property_name AS value"
|
||||
constraints += ["property_name ILIKE %(property_name_l)s"]
|
||||
|
||||
if q:
|
||||
constraints += ["value ILIKE %(q_l)s"]
|
||||
query = ch_client.format(
|
||||
f"""SELECT {select},data_count
|
||||
FROM product_analytics.autocomplete_event_properties_grouped
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY data_count DESC
|
||||
LIMIT %(limit)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
|
||||
return {"values": helper.list_to_camel_case(rows), "_src": 2}
|
||||
|
|
@ -1,182 +0,0 @@
|
|||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
PREDEFINED_EVENTS = {
|
||||
"CLICK": "String",
|
||||
"INPUT": "String",
|
||||
"LOCATION": "String",
|
||||
"ERROR": "String",
|
||||
"PERFORMANCE": "String",
|
||||
"REQUEST": "String"
|
||||
}
|
||||
|
||||
|
||||
def get_events(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT DISTINCT
|
||||
ON(event_name,auto_captured)
|
||||
COUNT (1) OVER () AS total,
|
||||
event_name AS name, display_name, description,
|
||||
auto_captured
|
||||
FROM product_analytics.all_events
|
||||
WHERE project_id=%(project_id)s
|
||||
ORDER BY auto_captured, display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
|
||||
rows = ch_client.execute(r)
|
||||
if len(rows) == 0:
|
||||
return {"total": len(PREDEFINED_EVENTS), "list": [{
|
||||
"name": e,
|
||||
"displayName": "",
|
||||
"description": "",
|
||||
"autoCaptured": True,
|
||||
"id": "event_0",
|
||||
"dataType": "string",
|
||||
"possibleTypes": [
|
||||
"string"
|
||||
],
|
||||
"_foundInPredefinedList": False
|
||||
} for e in PREDEFINED_EVENTS]}
|
||||
total = rows[0]["total"]
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for i, row in enumerate(rows):
|
||||
row["id"] = f"event_{i}"
|
||||
row["dataType"] = "string"
|
||||
row["possibleTypes"] = ["string"]
|
||||
row["_foundInPredefinedList"] = True
|
||||
row.pop("total")
|
||||
keys = [r["name"] for r in rows]
|
||||
for e in PREDEFINED_EVENTS:
|
||||
if e not in keys:
|
||||
total += 1
|
||||
rows.append({
|
||||
"name": e,
|
||||
"displayName": "",
|
||||
"description": "",
|
||||
"autoCaptured": True,
|
||||
"id": "event_0",
|
||||
"dataType": "string",
|
||||
"possibleTypes": [
|
||||
"string"
|
||||
],
|
||||
"_foundInPredefinedList": False
|
||||
})
|
||||
return {"total": total, "list": rows}
|
||||
|
||||
|
||||
def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
|
||||
"projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
|
||||
|
||||
constraints = ["project_id = %(projectId)s",
|
||||
"created_at >= toDateTime(%(startDate)s/1000)",
|
||||
"created_at <= toDateTime(%(endDate)s/1000)"]
|
||||
ev_constraints = []
|
||||
for i, f in enumerate(data.filters):
|
||||
if not f.is_event:
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
f_k = f"f_value{i}"
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
is_any = sh.isAny_opreator(f.operator)
|
||||
is_undefined = sh.isUndefined_operator(f.operator)
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
if f.is_predefined:
|
||||
column = f.name
|
||||
else:
|
||||
column = f"properties.{f.name}"
|
||||
|
||||
if is_any:
|
||||
condition = f"notEmpty{column})"
|
||||
elif is_undefined:
|
||||
condition = f"empty({column})"
|
||||
else:
|
||||
condition = sh.multi_conditions(
|
||||
get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
|
||||
values=f.value, value_key=f_k)
|
||||
constraints.append(condition)
|
||||
|
||||
else:
|
||||
e_k = f"e_value{i}"
|
||||
full_args = {**full_args, e_k: f.name}
|
||||
condition = f"`$event_name` = %({e_k})s"
|
||||
sub_conditions = []
|
||||
for j, ef in enumerate(f.properties.filters):
|
||||
p_k = f"e_{i}_p_{j}"
|
||||
full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k, data_type=ef.data_type)}
|
||||
cast = get_col_cast(data_type=ef.data_type, value=ef.value)
|
||||
if ef.is_predefined:
|
||||
sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(`{ef.name}`,'{cast}')",
|
||||
val_name=p_k, operator=ef.operator)
|
||||
else:
|
||||
sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(properties.`{ef.name}`,{cast})",
|
||||
val_name=p_k, operator=ef.operator)
|
||||
sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
|
||||
if len(sub_conditions) > 0:
|
||||
condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"
|
||||
|
||||
ev_constraints.append(condition)
|
||||
|
||||
constraints.append("(" + " OR ".join(ev_constraints) + ")")
|
||||
query = ch_client.format(
|
||||
f"""SELECT COUNT(1) OVER () AS total,
|
||||
event_id,
|
||||
`$event_name`,
|
||||
created_at,
|
||||
`distinct_id`,
|
||||
`$browser`,
|
||||
`$import`,
|
||||
`$os`,
|
||||
`$country`,
|
||||
`$state`,
|
||||
`$city`,
|
||||
`$screen_height`,
|
||||
`$screen_width`,
|
||||
`$source`,
|
||||
`$user_id`,
|
||||
`$device`
|
||||
FROM product_analytics.events
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY created_at
|
||||
LIMIT %(limit)s OFFSET %(offset)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
if len(rows) == 0:
|
||||
return {"total": 0, "rows": [], "_src": 2}
|
||||
total = rows[0]["total"]
|
||||
for r in rows:
|
||||
r.pop("total")
|
||||
return {"total": total, "rows": rows, "_src": 2}
|
||||
|
||||
|
||||
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, all_events.event_name AS name,
|
||||
*
|
||||
FROM product_analytics.all_events
|
||||
WHERE project_id = %(project_id)s
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
|
||||
rows = ch_client.execute(r)
|
||||
if len(rows) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = rows[0]["total"]
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for i, row in enumerate(rows):
|
||||
row["id"] = f"event_{i}"
|
||||
row["dataType"] = "string"
|
||||
row["possibleTypes"] = ["string"]
|
||||
row["_foundInPredefinedList"] = True
|
||||
row.pop("total")
|
||||
return {"total": total, "list": rows}
@ -1,167 +0,0 @@
|
|||
import schemas
|
||||
from chalicelib.utils import helper, exp_ch_helper
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
PREDEFINED_PROPERTIES = {
|
||||
"label": "String",
|
||||
"hesitation_time": "UInt32",
|
||||
"name": "String",
|
||||
"payload": "String",
|
||||
"level": "Enum8",
|
||||
"source": "Enum8",
|
||||
"message": "String",
|
||||
"error_id": "String",
|
||||
"duration": "UInt16",
|
||||
"context": "Enum8",
|
||||
"url_host": "String",
|
||||
"url_path": "String",
|
||||
"url_hostpath": "String",
|
||||
"request_start": "UInt16",
|
||||
"response_start": "UInt16",
|
||||
"response_end": "UInt16",
|
||||
"dom_content_loaded_event_start": "UInt16",
|
||||
"dom_content_loaded_event_end": "UInt16",
|
||||
"load_event_start": "UInt16",
|
||||
"load_event_end": "UInt16",
|
||||
"first_paint": "UInt16",
|
||||
"first_contentful_paint_time": "UInt16",
|
||||
"speed_index": "UInt16",
|
||||
"visually_complete": "UInt16",
|
||||
"time_to_interactive": "UInt16",
|
||||
"ttfb": "UInt16",
|
||||
"ttlb": "UInt16",
|
||||
"response_time": "UInt16",
|
||||
"dom_building_time": "UInt16",
|
||||
"dom_content_loaded_event_time": "UInt16",
|
||||
"load_event_time": "UInt16",
|
||||
"min_fps": "UInt8",
|
||||
"avg_fps": "UInt8",
|
||||
"max_fps": "UInt8",
|
||||
"min_cpu": "UInt8",
|
||||
"avg_cpu": "UInt8",
|
||||
"max_cpu": "UInt8",
|
||||
"min_total_js_heap_size": "UInt64",
|
||||
"avg_total_js_heap_size": "UInt64",
|
||||
"max_total_js_heap_size": "UInt64",
|
||||
"min_used_js_heap_size": "UInt64",
|
||||
"avg_used_js_heap_size": "UInt64",
|
||||
"max_used_js_heap_size": "UInt64",
|
||||
"method": "Enum8",
|
||||
"status": "UInt16",
|
||||
"success": "UInt8",
|
||||
"request_body": "String",
|
||||
"response_body": "String",
|
||||
"transfer_size": "UInt32",
|
||||
"selector": "String",
|
||||
"normalized_x": "Float32",
|
||||
"normalized_y": "Float32",
|
||||
"message_id": "UInt64"
|
||||
}
|
||||
|
||||
|
||||
def get_all_properties(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, property_name AS name,
|
||||
display_name,
|
||||
array_agg(DISTINCT event_properties.value_type) AS possible_types
|
||||
FROM product_analytics.all_properties
|
||||
LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
|
||||
WHERE all_properties.project_id = %(project_id)s
|
||||
GROUP BY property_name, display_name
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id,
|
||||
"limit": page.limit,
|
||||
"offset": (page.page - 1) * page.limit})
|
||||
properties = ch_client.execute(r)
|
||||
if len(properties) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = properties[0]["total"]
|
||||
properties = helper.list_to_camel_case(properties)
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p["_foundInPredefinedList"] = False
|
||||
if p["name"] in PREDEFINED_PROPERTIES:
|
||||
p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
|
||||
p["_foundInPredefinedList"] = True
|
||||
p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
|
||||
p.pop("total")
|
||||
keys = [p["name"] for p in properties]
|
||||
for p in PREDEFINED_PROPERTIES:
|
||||
if p not in keys:
|
||||
total += 1
|
||||
properties.append({
|
||||
"name": p,
|
||||
"displayName": "",
|
||||
"possibleTypes": [
|
||||
],
|
||||
"id": f"prop_{len(properties) + 1}",
|
||||
"_foundInPredefinedList": False,
|
||||
"dataType": PREDEFINED_PROPERTIES[p]
|
||||
})
|
||||
return {"total": total, "list": properties}
|
||||
|
||||
|
||||
def get_event_properties(project_id: int, event_name):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT all_properties.property_name AS name,
|
||||
all_properties.display_name,
|
||||
array_agg(DISTINCT event_properties.value_type) AS possible_types
|
||||
FROM product_analytics.event_properties
|
||||
INNER JOIN product_analytics.all_properties USING (property_name)
|
||||
WHERE event_properties.project_id = %(project_id)s
|
||||
AND all_properties.project_id = %(project_id)s
|
||||
AND event_properties.event_name = %(event_name)s
|
||||
GROUP BY ALL
|
||||
ORDER BY 1;""",
|
||||
parameters={"project_id": project_id, "event_name": event_name})
|
||||
properties = ch_client.execute(r)
|
||||
properties = helper.list_to_camel_case(properties)
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p["_foundInPredefinedList"] = False
|
||||
if p["name"] in PREDEFINED_PROPERTIES:
|
||||
p["dataType"] = exp_ch_helper.simplify_clickhouse_type(PREDEFINED_PROPERTIES[p["name"]])
|
||||
p["_foundInPredefinedList"] = True
|
||||
p["possibleTypes"] = list(set(exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])))
|
||||
|
||||
return properties
|
||||
|
||||
|
||||
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT COUNT(1) OVER () AS total, all_properties.property_name AS name,
|
||||
all_properties.*,
|
||||
possible_types.values AS possible_types,
|
||||
possible_values.values AS sample_values
|
||||
FROM product_analytics.all_properties
|
||||
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS
|
||||
values
|
||||
FROM product_analytics.event_properties
|
||||
WHERE project_id=%(project_id)s
|
||||
GROUP BY 1, 2) AS possible_types
|
||||
USING (project_id, property_name)
|
||||
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS
|
||||
values
|
||||
FROM product_analytics.property_values_samples
|
||||
WHERE project_id=%(project_id)s
|
||||
GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
|
||||
WHERE project_id = %(project_id)s
|
||||
ORDER BY display_name
|
||||
LIMIT %(limit)s
|
||||
OFFSET %(offset)s;""",
|
||||
parameters={"project_id": project_id,
|
||||
"limit": page.limit,
|
||||
"offset": (page.page - 1) * page.limit})
|
||||
properties = ch_client.execute(r)
|
||||
if len(properties) == 0:
|
||||
return {"total": 0, "list": []}
|
||||
total = properties[0]["total"]
|
||||
for i, p in enumerate(properties):
|
||||
p["id"] = f"prop_{i}"
|
||||
p.pop("total")
|
||||
return {"total": total, "list": helper.list_to_camel_case(properties)}
|
||||
|
|
@@ -6,18 +6,8 @@ logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search_pg
from . import sessions_search_pg as sessions_search_legacy

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental sessions search")
if config("EXP_METRICS", cast=bool, default=False):
    from . import sessions_ch as sessions
    from . import sessions_search_ch as sessions_search
else:
    from . import sessions_pg as sessions
    from . import sessions_search_pg as sessions_search

# if config("EXP_METRICS", cast=bool, default=False):
#     from . import sessions_ch as sessions
# else:
#     from . import sessions_pg as sessions
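For illustration only, not part of the diff: how the EXP_METRICS flag is expected to pick the ClickHouse or PostgreSQL implementation for downstream callers; the package path is an assumption.

# Illustrative only — assumes the snippet above lives in chalicelib/core/sessions/__init__.py.
import os
os.environ["EXP_METRICS"] = "true"  # python-decouple reads EXP_METRICS from the environment

from chalicelib.core.sessions import sessions, sessions_search
print(sessions.__name__)         # expected to resolve to the sessions_ch module when the flag is true
print(sessions_search.__name__)  # expected to resolve to sessions_search_ch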
@ -2,12 +2,10 @@ import logging
|
|||
from typing import List, Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core.events import events
|
||||
from chalicelib.core import events, metadata
|
||||
from . import performance_event, sessions_legacy
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -50,8 +48,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
query = f"""SELECT gs.generate_series AS timestamp,
|
||||
COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
|
||||
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
|
||||
LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id,
|
||||
isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id),
|
||||
LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
|
||||
s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
|
||||
s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
|
||||
s.datetime AS datetime
|
||||
{query_part}) AS processed_sessions ON(TRUE)
|
||||
|
|
@ -150,7 +148,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.LOCATION:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
|
|
@ -175,7 +173,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.REQUEST_DETAILS:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
|
|
@ -255,7 +253,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
FROM (SELECT s.user_id AS user_id {extra_col}
|
||||
{query_part}
|
||||
WHERE isNotNull(user_id)
|
||||
AND notEmpty(user_id)) AS filtred_sessions
|
||||
AND user_id != '') AS filtred_sessions
|
||||
{extra_where}
|
||||
GROUP BY {main_col}
|
||||
ORDER BY total DESC
|
||||
|
|
@ -279,7 +277,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
return sessions
|
||||
|
||||
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
|
||||
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
|
||||
schemas.EventType.GRAPHQL] \
|
||||
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
|
||||
|
|
@ -332,11 +330,7 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
|
|||
extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
|
||||
condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
else:
|
||||
# condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
condition = get_sub_condition(
|
||||
col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
|
||||
val_name=value_key, operator=op
|
||||
)
|
||||
condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
|
||||
|
||||
conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
|
||||
|
||||
|
|
@ -379,34 +373,6 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions_where = ["main.project_id = %(projectId)s",
|
||||
"main.created_at >= toDateTime(%(startDate)s/1000)",
|
||||
"main.created_at <= toDateTime(%(endDate)s/1000)"]
|
||||
any_incident = False
|
||||
for i, e in enumerate(data.events):
|
||||
if e.type == schemas.EventType.INCIDENT and e.operator == schemas.SearchEventOperator.IS_ANY:
|
||||
any_incident = True
|
||||
data.events.pop(i)
|
||||
# don't stop here because we could have multiple filters looking for any incident
|
||||
|
||||
if any_incident:
|
||||
any_incident = False
|
||||
for f in data.filters:
|
||||
if f.type == schemas.FilterType.ISSUE:
|
||||
any_incident = True
|
||||
if f.value.index(schemas.IssueType.INCIDENT) < 0:
|
||||
f.value.append(schemas.IssueType.INCIDENT)
|
||||
if f.operator == schemas.SearchEventOperator.IS_ANY:
|
||||
f.operator = schemas.SearchEventOperator.IS
|
||||
break
|
||||
|
||||
if not any_incident:
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(**{
|
||||
"type": "issue",
|
||||
"isEvent": False,
|
||||
"value": [
|
||||
"incident"
|
||||
],
|
||||
"operator": "is"
|
||||
}))
|
||||
|
||||
if len(data.filters) > 0:
|
||||
meta_keys = None
|
||||
# to reduce include a sub-query of sessions inside events query, in order to reduce the selected data
|
||||
|
|
@ -550,7 +516,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
ss_constraints.append(
|
||||
sh.multi_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
|
||||
value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.METADATA:
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
# get metadata list only if you need it
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
|
|
@ -694,60 +660,39 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
event.value = helper.values_for_operator(value=event.value, op=event.operator)
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(event.value, value_key=e_k),
|
||||
**sh.multi_values(event.source, value_key=s_k),
|
||||
e_k: event.value[0] if len(event.value) > 0 else event.value}
|
||||
**sh.multi_values(event.source, value_key=s_k)}
|
||||
|
||||
if event_type == schemas.EventType.CLICK:
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
if platform == "web":
|
||||
_column = "label"
|
||||
_column = events.EventType.CLICK.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
if not is_any:
|
||||
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
||||
# event_where.append(json_condition(
|
||||
# "main",
|
||||
# "$properties",
|
||||
# "selector", op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"main.`$properties`.selector",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
event_where.append(json_condition(
|
||||
"main",
|
||||
"$properties",
|
||||
"selector", op, event.value, e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
if is_not:
|
||||
# event_where.append(json_condition(
|
||||
# "sub", "$properties", _column, op, event.value, e_k
|
||||
# ))
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
event_where.append(json_condition(
|
||||
"sub", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
|
||||
}
|
||||
)
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
# event_where.append(
|
||||
# json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"main.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
_column = "label"
|
||||
_column = events.EventType.CLICK_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -766,10 +711,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
|
||||
elif event_type == schemas.EventType.INPUT:
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
if platform == "web":
|
||||
_column = "label"
|
||||
_column = events.EventType.INPUT.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -794,7 +739,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
|
||||
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
|
||||
else:
|
||||
_column = "label"
|
||||
_column = events.EventType.INPUT_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -814,7 +759,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
|
||||
elif event_type == schemas.EventType.LOCATION:
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
if platform == "web":
|
||||
_column = 'url_path'
|
||||
|
|
@ -836,7 +781,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
_column = "name"
|
||||
_column = events.EventType.VIEW_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -853,9 +798,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.CUSTOM:
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = "name"
|
||||
_column = events.EventType.CUSTOM.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -873,7 +818,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.REQUEST:
|
||||
elif event_type == events.EventType.REQUEST.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = 'url_path'
|
||||
event_where.append(
|
||||
|
|
@ -894,9 +839,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
|
||||
elif event_type == schemas.EventType.STATE_ACTION:
|
||||
elif event_type == events.EventType.STATEACTION.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = "name"
|
||||
_column = events.EventType.STATEACTION.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -915,7 +860,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
# TODO: isNot for ERROR
|
||||
elif event_type == schemas.EventType.ERROR:
|
||||
elif event_type == events.EventType.ERROR.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
|
||||
events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
|
||||
event_where.append(
|
||||
|
|
@ -925,23 +870,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions[-1]["condition"] = []
|
||||
if not is_any and event.value not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
|
||||
value_key=s_k))
|
||||
event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
|
||||
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
|
||||
|
||||
# ----- Mobile
|
||||
elif event_type == schemas.EventType.CLICK_MOBILE:
|
||||
_column = "label"
|
||||
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
|
||||
_column = events.EventType.CLICK_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -959,8 +901,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.INPUT_MOBILE:
|
||||
_column = "label"
|
||||
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
|
||||
_column = events.EventType.INPUT_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -978,8 +920,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.VIEW_MOBILE:
|
||||
_column = "name"
|
||||
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
|
||||
_column = events.EventType.VIEW_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -997,8 +939,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.CUSTOM_MOBILE:
|
||||
_column = "name"
|
||||
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
|
||||
_column = events.EventType.CUSTOM_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -1017,7 +959,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.REQUEST_MOBILE:
|
||||
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = 'url_path'
|
||||
event_where.append(
|
||||
|
|
@ -1037,8 +979,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.ERROR_MOBILE:
|
||||
_column = "name"
|
||||
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
|
||||
_column = events.EventType.CRASH_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -1057,8 +999,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
"main", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
|
||||
_column = "label"
|
||||
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
|
||||
_column = events.EventType.SWIPE_MOBILE.column
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
|
@ -1259,7 +1201,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
||||
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
|
||||
event_where.append(json_condition(
|
||||
"main", "$properties", "name", op, f.value, e_k_f
|
||||
"main", "$properties", events.EventType.GRAPHQL.column, op, f.value, e_k_f
|
||||
))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
|
||||
|
|
@ -1280,92 +1222,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
else:
|
||||
logging.warning(f"undefined GRAPHQL filter: {f.type}")
|
||||
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
|
||||
elif event_type == schemas.EventType.EVENT:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = "label"
|
||||
event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
|
||||
events_conditions.append({"type": event_where[-1], "condition": ""})
|
||||
elif event_type == schemas.EventType.INCIDENT:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
_column = "label"
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
||||
if is_not:
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
|
||||
}
|
||||
)
|
||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
|
||||
event_where.append(
|
||||
sh.multi_conditions(
|
||||
get_sub_condition(col_name=f"main.`$properties`.{_column}",
|
||||
val_name=e_k, operator=event.operator),
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
elif event_type == schemas.EventType.CLICK_COORDINATES:
|
||||
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
|
||||
event_where.append(
|
||||
f"main.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'")
|
||||
events_conditions.append({"type": event_where[-1]})
|
||||
|
||||
if is_not:
|
||||
event_where.append(
|
||||
sh.coordinate_conditions(
|
||||
condition_x=f"sub.`$properties`.normalized_x",
|
||||
condition_y=f"sub.`$properties`.normalized_y",
|
||||
values=event.value, value_key=e_k, is_not=True)
|
||||
)
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'"
|
||||
}
|
||||
)
|
||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
event_where.append(
|
||||
sh.coordinate_conditions(
|
||||
condition_x=f"main.`$properties`.normalized_x",
|
||||
condition_y=f"main.`$properties`.normalized_y",
|
||||
values=event.value, value_key=e_k, is_not=True)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
|
||||
else:
|
||||
continue
|
||||
if event.properties is not None and len(event.properties.filters) > 0:
|
||||
sub_conditions = []
|
||||
for l, property in enumerate(event.properties.filters):
|
||||
a_k = f"{e_k}_att_{l}"
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(property.value, value_key=a_k, data_type=property.data_type)}
|
||||
cast = get_col_cast(data_type=property.data_type, value=property.value)
|
||||
if property.is_predefined:
|
||||
condition = get_sub_condition(col_name=f"accurateCastOrNull(main.`{property.name}`,'{cast}')",
|
||||
val_name=a_k, operator=property.operator)
|
||||
else:
|
||||
condition = get_sub_condition(
|
||||
col_name=f"accurateCastOrNull(main.properties.`{property.name}`,'{cast}')",
|
||||
val_name=a_k, operator=property.operator)
|
||||
event_where.append(
|
||||
sh.multi_conditions(condition, property.value, value_key=a_k)
|
||||
)
|
||||
sub_conditions.append(event_where[-1])
|
||||
if len(sub_conditions) > 0:
|
||||
sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
|
||||
events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
|
||||
events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
|
||||
if event_index == 0 or or_events:
|
||||
event_where += ss_constraints
|
||||
if is_not:
|
||||
|
|
@ -1591,7 +1449,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
|
||||
_extra_or_condition.append(
|
||||
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
|
||||
c_f.value, value_key=e_k))
|
||||
c_f.value, value_key=e_k))
|
||||
else:
|
||||
logging.warning(f"unsupported extra_event type:${c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
|
|
@ -1663,15 +1521,18 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
|
|||
def get_session_user(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
""" \
|
||||
SELECT user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM "public".sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
"""\
|
||||
SELECT
|
||||
user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM
|
||||
"public".sessions
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
GROUP BY user_id;
|
||||
""",
|
||||
{"project_id": project_id, "userId": user_id}
|
||||
|
|
|
|||
269
api/chalicelib/core/sessions/sessions_notes.py
Normal file
|
|
@ -0,0 +1,269 @@
|
|||
import logging
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
|
||||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_note(tenant_id, project_id, user_id, note_id, share=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
|
||||
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.note_id = %(note_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""",
|
||||
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
|
||||
"note_id": note_id, "share": share})
|
||||
|
||||
cur.execute(query=query)
|
||||
row = cur.fetchone()
|
||||
row = helper.dict_to_camel_case(row)
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
|
||||
return row
|
||||
|
||||
|
||||
def get_session_notes(tenant_id, project_id, session_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND sessions_notes.session_id = %(session_id)s
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public)
|
||||
ORDER BY created_at DESC;""",
|
||||
{"project_id": project_id, "user_id": user_id,
|
||||
"tenant_id": tenant_id, "session_id": session_id})
|
||||
|
||||
cur.execute(query=query)
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return rows
|
||||
|
||||
|
||||
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
# base conditions
|
||||
conditions = [
|
||||
"sessions_notes.project_id = %(project_id)s",
|
||||
"sessions_notes.deleted_at IS NULL"
|
||||
]
|
||||
params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
|
||||
|
||||
# tag conditions
|
||||
if data.tags:
|
||||
tag_key = "tag_value"
|
||||
conditions.append(
|
||||
sh.multi_conditions(f"%({tag_key})s = sessions_notes.tag", data.tags, value_key=tag_key)
|
||||
)
|
||||
params.update(sh.multi_values(data.tags, value_key=tag_key))
|
||||
|
||||
# filter by ownership or shared status
|
||||
if data.shared_only:
|
||||
conditions.append("sessions_notes.is_public IS TRUE")
|
||||
elif data.mine_only:
|
||||
conditions.append("sessions_notes.user_id = %(user_id)s")
|
||||
else:
|
||||
conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")
|
||||
|
||||
# search condition
|
||||
if data.search:
|
||||
conditions.append("sessions_notes.message ILIKE %(search)s")
|
||||
params["search"] = f"%{data.search}%"
|
||||
|
||||
query = f"""
|
||||
SELECT
|
||||
COUNT(1) OVER () AS full_count,
|
||||
sessions_notes.*,
|
||||
users.name AS user_name
|
||||
FROM
|
||||
sessions_notes
|
||||
INNER JOIN
|
||||
users USING (user_id)
|
||||
WHERE
|
||||
{" AND ".join(conditions)}
|
||||
ORDER BY
|
||||
created_at {data.order}
|
||||
LIMIT
|
||||
%(limit)s OFFSET %(offset)s;
|
||||
"""
|
||||
params.update({
|
||||
"limit": data.limit,
|
||||
"offset": data.limit * (data.page - 1)
|
||||
})
|
||||
|
||||
query = cur.mogrify(query, params)
|
||||
logger.debug(query)
|
||||
cur.execute(query)
|
||||
rows = cur.fetchall()
|
||||
|
||||
result = {"count": 0, "notes": helper.list_to_camel_case(rows)}
|
||||
if rows:
|
||||
result["count"] = rows[0]["fullCount"]
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
row.pop("fullCount")
|
||||
|
||||
return result
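get_all_notes_by_project_id composes its WHERE clause from the optional filters before paginating. As a hedged illustration (the parameter-key naming is an assumption inferred from the sh.multi_conditions/sh.multi_values calls, not captured output), a payload with tags=["issue"], shared_only=True and search="checkout" would yield roughly:

```python
# Sketch only: roughly what the dynamic pieces look like for one hypothetical payload.
conditions = [
    "sessions_notes.project_id = %(project_id)s",
    "sessions_notes.deleted_at IS NULL",
    "(%(tag_value_0)s = sessions_notes.tag)",      # from data.tags via sh.multi_conditions
    "sessions_notes.is_public IS TRUE",            # shared_only
    "sessions_notes.message ILIKE %(search)s",     # free-text search
]
params = {
    "project_id": 42, "user_id": 7, "tenant_id": 1,
    "tag_value_0": "issue",
    "search": "%checkout%",
    "limit": 20, "offset": 0,                      # offset = limit * (page - 1)
}
# The final SQL joins the conditions with " AND " and orders by created_at {data.order}.
```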
|
||||
|
||||
|
||||
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
|
||||
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
|
||||
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
|
||||
**data.model_dump()})
|
||||
cur.execute(query)
|
||||
result = helper.dict_to_camel_case(cur.fetchone())
|
||||
if result:
|
||||
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
|
||||
return result
|
||||
|
||||
|
||||
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
|
||||
sub_query = []
|
||||
if data.message is not None:
|
||||
sub_query.append("message = %(message)s")
|
||||
if data.tag is not None and len(data.tag) > 0:
|
||||
sub_query.append("tag = %(tag)s")
|
||||
if data.is_public is not None:
|
||||
sub_query.append("is_public = %(is_public)s")
|
||||
if data.timestamp is not None:
|
||||
sub_query.append("timestamp = %(timestamp)s")
|
||||
|
||||
sub_query.append("updated_at = timezone('utc'::text, now())")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.sessions_notes
|
||||
SET
|
||||
{" ,".join(sub_query)}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(user_id)s
|
||||
AND note_id = %(note_id)s
|
||||
AND deleted_at ISNULL
|
||||
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
|
||||
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
|
||||
)
|
||||
row = helper.dict_to_camel_case(cur.fetchone())
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return row
|
||||
return {"errors": ["Note not found"]}
|
||||
|
||||
|
||||
def delete(project_id, note_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" UPDATE public.sessions_notes
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE note_id = %(note_id)s
|
||||
AND project_id = %(project_id)s
|
||||
AND deleted_at ISNULL;""",
|
||||
{"project_id": project_id, "note_id": note_id})
|
||||
)
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
|
||||
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
|
||||
if note is None:
|
||||
return {"errors": ["Note not found"]}
|
||||
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
|
||||
if note["timestamp"] > 0:
|
||||
session_url += f"&jumpto={note['timestamp']}"
|
||||
title = f"<{session_url}|Note for session {note['sessionId']}>"
|
||||
|
||||
blocks = [{"type": "section",
|
||||
"fields": [{"type": "mrkdwn",
|
||||
"text": title}]},
|
||||
{"type": "section",
|
||||
"fields": [{"type": "plain_text",
|
||||
"text": note["message"]}]}]
|
||||
if note["tag"]:
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": f"Tag: *{note['tag']}*"}]})
|
||||
bottom = f"Created by {note['userName'].capitalize()}"
|
||||
if user_id != note["userId"]:
|
||||
bottom += f"\nSent by {note['shareName']}: "
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": bottom}]})
|
||||
return Slack.send_raw(
|
||||
tenant_id=tenant_id,
|
||||
webhook_id=webhook_id,
|
||||
body={"blocks": blocks}
|
||||
)
|
||||
|
||||
|
||||
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
|
||||
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
|
||||
if note is None:
|
||||
return {"errors": ["Note not found"]}
|
||||
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
|
||||
if note["timestamp"] > 0:
|
||||
session_url += f"&jumpto={note['timestamp']}"
|
||||
title = f"[Note for session {note['sessionId']}]({session_url})"
|
||||
|
||||
blocks = [{
|
||||
"type": "TextBlock",
|
||||
"text": title,
|
||||
"style": "heading",
|
||||
"size": "Large"
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"spacing": "Small",
|
||||
"text": note["message"]
|
||||
}
|
||||
]
|
||||
if note["tag"]:
|
||||
blocks.append({"type": "TextBlock",
|
||||
"spacing": "Small",
|
||||
"text": f"Tag: *{note['tag']}*",
|
||||
"size": "Small"})
|
||||
bottom = f"Created by {note['userName'].capitalize()}"
|
||||
if user_id != note["userId"]:
|
||||
bottom += f"\nSent by {note['shareName']}: "
|
||||
blocks.append({"type": "TextBlock",
|
||||
"spacing": "Default",
|
||||
"text": bottom,
|
||||
"size": "Small",
|
||||
"fontType": "Monospace"})
|
||||
return MSTeams.send_raw(
|
||||
tenant_id=tenant_id,
|
||||
webhook_id=webhook_id,
|
||||
body={"type": "message",
|
||||
"attachments": [
|
||||
{"contentType": "application/vnd.microsoft.card.adaptive",
|
||||
"contentUrl": None,
|
||||
"content": {
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type": "AdaptiveCard",
|
||||
"version": "1.5",
|
||||
"body": [{
|
||||
"type": "ColumnSet",
|
||||
"style": "emphasis",
|
||||
"separator": True,
|
||||
"bleed": True,
|
||||
"columns": [{"width": "stretch",
|
||||
"items": blocks,
|
||||
"type": "Column"}]
|
||||
}]}}
|
||||
]})
|
||||
|
|
@ -2,8 +2,7 @@ import logging
|
|||
from typing import List, Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.events import events
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core import events, metadata
|
||||
from . import performance_event
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
|
@ -144,12 +143,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.LOCATION:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -161,12 +160,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
for e in data.events:
|
||||
if e.type == schemas.EventType.REQUEST_DETAILS:
|
||||
if e.operator not in extra_conditions:
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
|
||||
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
|
||||
"type": e.type,
|
||||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -274,7 +273,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
return sessions
|
||||
|
||||
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
|
||||
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
|
||||
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
|
||||
schemas.EventType.GRAPHQL] \
|
||||
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
|
||||
|
|
@ -440,7 +439,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
extra_constraints.append(
|
||||
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
|
||||
value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.METADATA:
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
# get metadata list only if you need it
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
|
|
@ -581,36 +580,36 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
**sh.multi_values(event.value, value_key=e_k),
|
||||
**sh.multi_values(event.source, value_key=s_k)}
|
||||
|
||||
if event_type == schemas.EventType.CLICK:
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
if platform == "web":
|
||||
event_from = event_from % f"events.clicks AS main "
|
||||
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
|
||||
if not is_any:
|
||||
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
|
||||
else:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
|
||||
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
|
||||
value_key=e_k))
|
||||
else:
|
||||
event_from = event_from % f"events_ios.taps AS main "
|
||||
event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
|
||||
event.value,
|
||||
value_key=e_k))
|
||||
|
||||
elif event_type == schemas.EventType.TAG:
|
||||
event_from = event_from % f"events.tags AS main "
|
||||
elif event_type == events.EventType.TAG.ui_type:
|
||||
event_from = event_from % f"{events.EventType.TAG.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.tag_id = %({e_k})s", event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.INPUT:
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
if platform == "web":
|
||||
event_from = event_from % f"events.inputs AS main "
|
||||
event_from = event_from % f"{events.EventType.INPUT.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
|
||||
sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
|
||||
value_key=e_k))
|
||||
if event.source is not None and len(event.source) > 0:
|
||||
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
|
||||
|
|
@ -618,53 +617,53 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
|
||||
|
||||
else:
|
||||
event_from = event_from % f"events_ios.inputs AS main "
|
||||
event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
|
||||
event.value,
|
||||
value_key=e_k))
|
||||
|
||||
|
||||
elif event_type == schemas.EventType.LOCATION:
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
if platform == "web":
|
||||
event_from = event_from % f"events.pages AS main "
|
||||
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
else:
|
||||
event_from = event_from % f"events_ios.views AS main "
|
||||
event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.CUSTOM:
|
||||
event_from = event_from % f"events_common.customs AS main "
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k})s", event.value,
|
||||
sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
|
||||
value_key=e_k))
|
||||
elif event_type == schemas.EventType.REQUEST:
|
||||
event_from = event_from % f"events_common.requests AS main "
|
||||
elif event_type == events.EventType.REQUEST.ui_type:
|
||||
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k})s", event.value,
|
||||
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
|
||||
value_key=e_k))
|
||||
# elif event_type == schemas.event_type.GRAPHQL:
|
||||
# elif event_type == events.event_type.GRAPHQL.ui_type:
|
||||
# event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
|
||||
# if not is_any:
|
||||
# event_where.append(
|
||||
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
|
||||
# value_key=e_k))
|
||||
elif event_type == schemas.EventType.STATE_ACTION:
|
||||
event_from = event_from % f"events.state_actions AS main "
|
||||
elif event_type == events.EventType.STATEACTION.ui_type:
|
||||
event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.ERROR:
|
||||
event_from = event_from % f"events.errors AS main INNER JOIN public.errors AS main1 USING(error_id)"
|
||||
elif event_type == events.EventType.ERROR.ui_type:
|
||||
event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
|
||||
event.source = list(set(event.source))
|
||||
if not is_any and event.value not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
|
|
@ -675,59 +674,59 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
|
||||
|
||||
# ----- Mobile
|
||||
elif event_type == schemas.EventType.CLICK_MOBILE:
|
||||
event_from = event_from % f"events_ios.taps AS main "
|
||||
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
|
||||
elif event_type == schemas.EventType.INPUT_MOBILE:
|
||||
event_from = event_from % f"events_ios.inputs AS main "
|
||||
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
if event.source is not None and len(event.source) > 0:
|
||||
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
|
||||
value_key="custom{i}"))
|
||||
full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
|
||||
elif event_type == schemas.EventType.VIEW_MOBILE:
|
||||
event_from = event_from % f"events_ios.views AS main "
|
||||
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.CUSTOM_MOBILE:
|
||||
event_from = event_from % f"events_common.customs AS main "
|
||||
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.CUSTOM_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.CUSTOM_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.REQUEST_MOBILE:
|
||||
event_from = event_from % f"events_common.requests AS main "
|
||||
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.REQUEST_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.REQUEST_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.ERROR_MOBILE:
|
||||
event_from = event_from % f"events_common.crashes AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
|
||||
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
|
||||
event_from = event_from % f"{events.EventType.CRASH_MOBILE.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
|
||||
if not is_any and event.value not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
|
||||
event_from = event_from % f"events_ios.swipes AS main "
|
||||
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
|
||||
event_from = event_from % f"{events.EventType.SWIPE_MOBILE.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.label {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.SWIPE_MOBILE.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
|
||||
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
|
||||
event_from = event_from % f"events_common.requests AS main "
|
||||
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
col = performance_event.get_col(event_type)
|
||||
colname = col["column"]
|
||||
|
|
@ -752,7 +751,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
|
||||
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
|
||||
]:
|
||||
event_from = event_from % f"events.pages AS main "
|
||||
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
|
||||
col = performance_event.get_col(event_type)
|
||||
colname = col["column"]
|
||||
tname = "main"
|
||||
|
|
@ -763,7 +762,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
f"{tname}.timestamp <= %(endDate)s"]
|
||||
if not is_any:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k})s",
|
||||
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k))
|
||||
e_k += "_custom"
|
||||
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
|
||||
|
|
@ -773,7 +772,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
event.source, value_key=e_k))
|
||||
|
||||
elif event_type == schemas.EventType.REQUEST_DETAILS:
|
||||
event_from = event_from % f"events_common.requests AS main "
|
||||
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
|
||||
apply = False
|
||||
for j, f in enumerate(event.filters):
|
||||
is_any = sh.isAny_opreator(f.operator)
|
||||
|
|
@ -785,7 +784,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
||||
if f.type == schemas.FetchFilterType.FETCH_URL:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.path {op} %({e_k_f})s::text",
|
||||
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
|
||||
f.value, value_key=e_k_f))
|
||||
apply = True
|
||||
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
|
||||
|
|
@ -817,7 +816,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
if not apply:
|
||||
continue
|
||||
elif event_type == schemas.EventType.GRAPHQL:
|
||||
event_from = event_from % f"events.graphql AS main "
|
||||
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
|
||||
for j, f in enumerate(event.filters):
|
||||
is_any = sh.isAny_opreator(f.operator)
|
||||
if is_any or len(f.value) == 0:
|
||||
|
|
@ -828,7 +827,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
||||
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.name {op} %({e_k_f})s", f.value,
|
||||
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
|
||||
value_key=e_k_f))
|
||||
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
|
||||
event_where.append(
|
||||
|
|
@ -909,7 +908,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
|
||||
|
||||
if errors_only:
|
||||
extra_from += f" INNER JOIN events.errors AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
|
||||
extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
|
||||
extra_constraints.append("ser.source = 'js_exception'")
|
||||
extra_constraints.append("ser.project_id = %(project_id)s")
|
||||
# if error_status != schemas.ErrorStatus.all:
|
||||
|
|
@ -985,12 +984,12 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
c.value = helper.values_for_operator(value=c.value, op=c.operator)
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(c.value, value_key=e_k)}
|
||||
if c.type == schemas.EventType.LOCATION:
|
||||
if c.type == events.EventType.LOCATION.ui_type:
|
||||
_extra_or_condition.append(
|
||||
sh.multi_conditions(f"ev.path {op} %({e_k})s",
|
||||
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
|
||||
c.value, value_key=e_k))
|
||||
else:
|
||||
logger.warning(f"unsupported extra_event type:${c.type}")
|
||||
logger.warning(f"unsupported extra_event type: {c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
|
||||
query_part = f"""\
|
||||
|
|
@ -1045,15 +1044,18 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
|
|||
def get_session_user(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
""" \
|
||||
SELECT user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM "public".sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
"""\
|
||||
SELECT
|
||||
user_id,
|
||||
count(*) as session_count,
|
||||
max(start_ts) as last_seen,
|
||||
min(start_ts) as first_seen
|
||||
FROM
|
||||
"public".sessions
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(userId)s
|
||||
AND duration is not null
|
||||
GROUP BY user_id;
|
||||
""",
|
||||
{"project_id": project_id, "userId": user_id}
@@ -1072,10 +1074,11 @@ def count_all():

def session_exists(project_id, session_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""SELECT 1
                               FROM public.sessions
                               WHERE session_id = %(session_id)s
                               AND project_id = %(project_id)s LIMIT 1;""",
        query = cur.mogrify("""SELECT 1
                               FROM public.sessions
                               WHERE session_id=%(session_id)s
                               AND project_id=%(project_id)s
                               LIMIT 1;""",
                            {"project_id": project_id, "session_id": session_id})
        cur.execute(query)
        row = cur.fetchone()
@@ -1,7 +1,6 @@
import schemas
from chalicelib.core import metadata, assist, canvas, user_testing
from chalicelib.core.issues import issues
from chalicelib.core.events import events, events_mobile
from chalicelib.core import events, metadata, events_mobile, \
    issues, assist, canvas, user_testing
from . import sessions_mobs, sessions_devtool
from chalicelib.core.errors.modules import errors_helper
from chalicelib.utils import pg_client, helper
@@ -129,8 +128,30 @@ def get_events(project_id, session_id):
        data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)

        data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
        data['issues'] = issues.reduce_issues(data['issues'])
        data['incidents'] = events.get_incidents_by_session_id(session_id=session_id, project_id=project_id)
        data['issues'] = reduce_issues(data['issues'])
        return data
    else:
        return None


# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
        else:
            i += 1
            break

        if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
            issues_list.pop(j)
        else:
            i += 1

    return issues_list
@@ -40,8 +40,7 @@ COALESCE((SELECT TRUE

# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
                    user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
                    count_only=False, issue=None, ids_only=False):
    platform = project.platform
                    count_only=False, issue=None, ids_only=False, platform="web"):
    if data.bookmarked:
        data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
        if data.startTimestamp is None:

@@ -49,7 +48,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
        return {
            'total': 0,
            'sessions': [],
            '_src': 1
            'src': 1
        }
    full_args, query_part = sessions_legacy.search_query_parts(data=data, error_status=error_status,
                                                               errors_only=errors_only,
@@ -177,7 +176,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions),
        '_src': 1
        'src': 1
    }


@@ -240,7 +239,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
        cur.execute("\nUNION\n".join(sub_queries))
        rows = cur.fetchall()
        for i in rows:
            i["_src"] = 1
            results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
    return results
@@ -248,7 +246,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):

def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
                           ascending: bool = False) -> dict:
    if session_ids is None or len(session_ids) == 0:
        return {"total": 0, "sessions": [], "_src": 1}
        return {"total": 0, "sessions": []}
    with pg_client.PostgresClient() as cur:
        meta_keys = metadata.get(project_id=project_id)
        params = {"project_id": project_id, "session_ids": tuple(session_ids)}

@@ -267,4 +265,4 @@ def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 's
            s["metadata"] = {}
            for m in meta_keys:
                s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows), "_src": 1}
    return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
@@ -1,2 +1 @@
from .sessions_viewed import *
from .sessions_viewed_ch import *
from .sessions_viewed import *
@@ -87,7 +87,7 @@ async def create_tenant(data: schemas.UserSignupSchema):
        "spotRefreshToken": r.pop("spotRefreshToken"),
        "spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"),
        'data': {
            "scopeState": 2,
            "scopeState": 0,
            "user": r
        }
    }
@@ -11,3 +11,9 @@ if smtp.has_smtp():
    logger.info("valid SMTP configuration found")
else:
    logger.info("no SMTP configuration found or SMTP validation failed")

if config("EXP_CH_DRIVER", cast=bool, default=True):
    logging.info(">>> Using new CH driver")
    from . import ch_client_exp as ch_client
else:
    from . import ch_client
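With this toggle in place, callers keep importing ch_client from chalicelib.utils and stay agnostic of which driver backs it. A minimal usage sketch, assuming the ClickHouseClient context-manager API shown elsewhere in this diff (the table name and filter values are made up):

```python
# Minimal sketch; table and values are illustrative assumptions, not from the diff.
from chalicelib.utils import ch_client

with ch_client.ClickHouseClient() as ch:
    rows = ch.execute(query="SELECT session_id FROM experimental.sessions "
                            "WHERE project_id = %(project_id)s LIMIT 10",
                      parameters={"project_id": 42})
# Both the old clickhouse_driver client and the new clickhouse_connect wrapper
# return a list of dicts, e.g. [{"session_id": 123}, ...].
```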
|
|||
|
|
@ -1,185 +1,73 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
import clickhouse_driver
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
if kwargs.get("parameters"):
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
|
||||
else:
|
||||
logger.debug(
|
||||
str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
elif len(args) > 0:
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(args[0])
|
||||
else:
|
||||
logger.debug(str.encode(args[0]))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
self.__client = clickhouse_driver.Client(host=config("ch_host"),
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
user=config("ch_user", default="default"),
|
||||
password=config("ch_password", default=""),
|
||||
port=config("ch_port", cast=int),
|
||||
settings=settings,
|
||||
**extra_args) \
|
||||
if self.__client is None else self.__client
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def execute(self, query, parameters=None, **args):
|
||||
try:
|
||||
results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
|
||||
keys = tuple(x for x, y in results[1])
|
||||
return [dict(zip(keys, i)) for i in results[0]]
|
||||
except Exception as err:
|
||||
logger.error("--------- CH EXCEPTION -----------", exc_info=err)
|
||||
logger.error("--------- CH QUERY EXCEPTION -----------")
|
||||
logger.error(self.format(query=query, parameters=parameters)
|
||||
.replace('\n', '\\n')
|
||||
.replace(' ', ' ')
|
||||
.replace(' ', ' '))
|
||||
logger.error("--------------------")
|
||||
raise err
|
||||
|
||||
def insert(self, query, params=None, **args):
|
||||
return self.__client.execute(query=query, params=params, **args)
|
||||
|
||||
def client(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, parameters=None):
|
||||
if parameters:
|
||||
ctx = QueryContext(query=query, parameters=parameters)
|
||||
return ctx.final_query
|
||||
return query
|
||||
def format(self, query, parameters):
|
||||
if parameters is None:
|
||||
return query
|
||||
return self.__client.substitute_params(query, parameters, self.__client.connection.context)
|
||||
|
||||
def __exit__(self, *args):
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
pass
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
logger.info(f">CH_POOL:not defined")
|
||||
|
||||
|
||||
async def terminate():
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
pass
|
||||
|
|
|
|||
178
api/chalicelib/utils/ch_client_exp.py
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
if kwargs.get("parameters"):
|
||||
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
elif len(args) > 0:
|
||||
logger.debug(str.encode(args[0]))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
|
||||
def __enter__(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, parameters=None):
|
||||
if parameters:
|
||||
ctx = QueryContext(query=query, parameters=parameters)
|
||||
return ctx.final_query
|
||||
return query
|
||||
|
||||
def __exit__(self, *args):
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
|
||||
|
||||
async def terminate():
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
|
|
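Taken together, the experimental client keeps clickhouse-connect handles in a process-wide queue (sized by CH_MINCONN/CH_MAXCONN) and monkey-patches query() so rows come back as dicts. A minimal usage sketch, assuming the module is importable as chalicelib.utils.ch_client_exp and the ch_host/ch_port_http/CH_POOL settings are configured; the query itself is only illustrative:

import asyncio

from chalicelib.utils import ch_client_exp


async def main():
    await ch_client_exp.init()                      # builds the pool when CH_POOL=true
    with ch_client_exp.ClickHouseClient() as ch:    # __enter__ hands back the patched client
        rows = ch.execute(query="SELECT %(pid)s AS project_id", parameters={"pid": 1})
        print(rows)                                 # -> [{'project_id': 1}]
    await ch_client_exp.terminate()                 # closes pooled connections


asyncio.run(main())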
@ -1,13 +1,7 @@
|
|||
import logging
|
||||
import re
|
||||
from typing import Union, Any
|
||||
from typing import Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from schemas import SearchEventOperator
|
||||
import math
|
||||
import struct
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -56,8 +50,7 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
|
|||
schemas.EventType.ERROR: "ERROR",
|
||||
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE',
|
||||
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE',
|
||||
schemas.FetchFilterType.FETCH_URL: 'REQUEST',
|
||||
schemas.EventType.INCIDENT: "INCIDENT",
|
||||
schemas.FetchFilterType.FETCH_URL: 'REQUEST'
|
||||
}
|
||||
defs_mobile = {
|
||||
schemas.EventType.CLICK_MOBILE: "TAP",
|
||||
|
|
@ -66,170 +59,10 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
|
|||
schemas.EventType.REQUEST_MOBILE: "REQUEST",
|
||||
schemas.EventType.ERROR_MOBILE: "CRASH",
|
||||
schemas.EventType.VIEW_MOBILE: "VIEW",
|
||||
schemas.EventType.SWIPE_MOBILE: "SWIPE",
|
||||
schemas.EventType.INCIDENT: "INCIDENT"
|
||||
schemas.EventType.SWIPE_MOBILE: "SWIPE"
|
||||
}
|
||||
if platform != "web" and event_type in defs_mobile:
|
||||
return defs_mobile.get(event_type)
|
||||
if event_type not in defs:
|
||||
raise Exception(f"unsupported EventType:{event_type}")
|
||||
return defs.get(event_type)
|
||||
|
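get_event_type maps the schema event types onto the labels stored for search, preferring the mobile mapping when the project platform is not web and raising on unsupported types. Two illustrative calls, assuming the platform argument the body relies on is part of the signature truncated above:

get_event_type(schemas.EventType.CLICK_MOBILE, platform="ios")     # 'TAP'  (mobile mapping wins)
get_event_type(schemas.FetchFilterType.FETCH_URL, platform="web")  # 'REQUEST'
get_event_type("somethingElse", platform="web")                    # raises Exception("unsupported EventType:...")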
||||
|
||||
# AI generated
|
||||
def simplify_clickhouse_type(ch_type: str) -> str:
|
||||
"""
|
||||
Simplify a ClickHouse data type name to a broader category like:
|
||||
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
|
||||
"""
|
||||
|
||||
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
|
||||
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
|
||||
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
|
||||
while True:
|
||||
match = pattern_wrappers.match(ch_type)
|
||||
if match:
|
||||
ch_type = match.group(2)
|
||||
else:
|
||||
break
|
||||
|
||||
# 2) Normalize (lowercase) for easier checks
|
||||
normalized_type = ch_type.lower()
|
||||
|
||||
# 3) Use pattern matching or direct checks for known categories
|
||||
# (You can adapt this as you see fit for your environment.)
|
||||
|
||||
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
|
||||
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
|
||||
return "int"
|
||||
|
||||
# Floats: Float32, Float64
|
||||
if re.match(r'^float(32|64)|double$', normalized_type):
|
||||
return "float"
|
||||
|
||||
# Decimal: Decimal(P, S)
|
||||
if normalized_type.startswith("decimal"):
|
||||
# return "decimal"
|
||||
return "float"
|
||||
|
||||
# Date/DateTime
|
||||
if normalized_type.startswith("date"):
|
||||
return "datetime"
|
||||
if normalized_type.startswith("datetime"):
|
||||
return "datetime"
|
||||
|
||||
# Strings: String, FixedString(N)
|
||||
if normalized_type.startswith("string"):
|
||||
return "string"
|
||||
if normalized_type.startswith("fixedstring"):
|
||||
return "string"
|
||||
|
||||
# UUID
|
||||
if normalized_type.startswith("uuid"):
|
||||
# return "uuid"
|
||||
return "string"
|
||||
|
||||
# Enums: Enum8(...) or Enum16(...)
|
||||
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
|
||||
# return "enum"
|
||||
return "string"
|
||||
|
||||
# Arrays: Array(T)
|
||||
if normalized_type.startswith("array"):
|
||||
return "array"
|
||||
|
||||
# Tuples: Tuple(T1, T2, ...)
|
||||
if normalized_type.startswith("tuple"):
|
||||
return "tuple"
|
||||
|
||||
# Map(K, V)
|
||||
if normalized_type.startswith("map"):
|
||||
return "map"
|
||||
|
||||
# Nested(...)
|
||||
if normalized_type.startswith("nested"):
|
||||
return "nested"
|
||||
|
||||
# If we didn't match above, just return the original type in lowercase
|
||||
return normalized_type
|
||||
|
||||
|
||||
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
|
||||
"""
|
||||
Takes a list of ClickHouse types and returns a list of simplified types
|
||||
by calling `simplify_clickhouse_type` on each.
|
||||
"""
|
||||
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
|
||||
|
||||
|
||||
def get_sub_condition(col_name: str, val_name: str,
|
||||
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str:
|
||||
if operator == SearchEventOperator.PATTERN:
|
||||
return f"match({col_name}, %({val_name})s)"
|
||||
op = sh.get_sql_operator(operator)
|
||||
return f"{col_name} {op} %({val_name})s"
|
||||
|
||||
|
||||
def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str:
|
||||
if value is None or len(value) == 0:
|
||||
return ""
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT):
|
||||
return best_clickhouse_type(value)
|
||||
return data_type.capitalize()
|
||||
|
||||
|
||||
# (type_name, minimum, maximum) – ordered by increasing size
|
||||
_INT_RANGES = [
|
||||
("Int8", -128, 127),
|
||||
("UInt8", 0, 255),
|
||||
("Int16", -32_768, 32_767),
|
||||
("UInt16", 0, 65_535),
|
||||
("Int32", -2_147_483_648, 2_147_483_647),
|
||||
("UInt32", 0, 4_294_967_295),
|
||||
("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807),
|
||||
("UInt64", 0, 18_446_744_073_709_551_615),
|
||||
]
|
||||
|
||||
|
||||
def best_clickhouse_type(value):
|
||||
"""
|
||||
Return the most compact ClickHouse numeric type that can store *value* loss-lessly.
|
||||
|
||||
"""
|
||||
# Treat bool like tiny int
|
||||
if isinstance(value, bool):
|
||||
value = int(value)
|
||||
|
||||
# --- Integers ---
|
||||
if isinstance(value, int):
|
||||
for name, lo, hi in _INT_RANGES:
|
||||
if lo <= value <= hi:
|
||||
return name
|
||||
# Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal
|
||||
return "Int128"
|
||||
|
||||
# --- Decimal.Decimal (exact) ---
|
||||
if isinstance(value, Decimal):
|
||||
# ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits.
|
||||
digits = len(value.as_tuple().digits)
|
||||
if digits <= 9:
|
||||
return "Decimal32"
|
||||
elif digits <= 18:
|
||||
return "Decimal64"
|
||||
else:
|
||||
return "Decimal128"
|
||||
|
||||
# --- Floats ---
|
||||
if isinstance(value, float):
|
||||
if not math.isfinite(value):
|
||||
return "Float64" # inf / nan → always Float64
|
||||
|
||||
# Check if a round-trip through 32-bit float preserves the bit pattern
|
||||
packed = struct.pack("f", value)
|
||||
if struct.unpack("f", packed)[0] == value:
|
||||
return "Float32"
|
||||
return "Float64"
|
||||
|
||||
raise TypeError(f"Unsupported type: {type(value).__name__}")
|
||||
|
|
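The helpers above collapse ClickHouse column types into broad buckets (simplify_clickhouse_type, after stripping Nullable/LowCardinality wrappers) and pick the narrowest numeric type that holds a Python value (best_clickhouse_type). A few illustrative calls, with the expected results read off the code:

simplify_clickhouse_type("LowCardinality(Nullable(Int32))")   # 'int'
simplify_clickhouse_type("DateTime64(3)")                     # 'datetime'
simplify_clickhouse_type("FixedString(16)")                   # 'string'

best_clickhouse_type(200)        # 'UInt8'   (first range that fits: 0..255)
best_clickhouse_type(-70000)     # 'Int32'
best_clickhouse_type(3.5)        # 'Float32' (survives a 32-bit round-trip)
best_clickhouse_type(0.1)        # 'Float64' (does not)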
|
|||
|
|
@ -99,8 +99,6 @@ def allow_captcha():
|
|||
|
||||
|
||||
def string_to_sql_like(value):
|
||||
if value is None:
|
||||
return None
|
||||
value = re.sub(' +', ' ', value)
|
||||
value = value.replace("*", "%")
|
||||
if value.startswith("^"):
|
||||
|
|
@ -336,3 +334,5 @@ def cast_session_id_to_string(data):
|
|||
for key in keys:
|
||||
data[key] = cast_session_id_to_string(data[key])
|
||||
return data
|
||||
|
||||
|
||||
|
|
|

1  api/chalicelib/utils/or_cache/__init__.py  Normal file
@ -0,0 +1 @@
from .or_cache import CachedResponse

83  api/chalicelib/utils/or_cache/or_cache.py  Normal file
@ -0,0 +1,83 @@
import functools
import inspect
import json
import logging
from chalicelib.utils import pg_client
import time
from fastapi.encoders import jsonable_encoder

logger = logging.getLogger(__name__)


class CachedResponse:
    def __init__(self, table, ttl):
        self.table = table
        self.ttl = ttl

    def __call__(self, func):
        self.param_names = {i: param for i, param in enumerate(inspect.signature(func).parameters)}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            values = dict()
            for i, param in self.param_names.items():
                if i < len(args):
                    values[param] = args[i]
                elif param in kwargs:
                    values[param] = kwargs[param]
                else:
                    values[param] = None
            result = self.__get(values)
            if result is None or result["expired"] \
                    or result["result"] is None or len(result["result"]) == 0:
                now = time.time()
                result = func(*args, **kwargs)
                now = time.time() - now
                if result is not None and len(result) > 0:
                    self.__add(values, result, now)
                    result[0]["cached"] = False
            else:
                logger.info(f"using cached response for "
                            f"{func.__name__}({','.join([f'{key}={val}' for key, val in enumerate(values)])})")
                result = result["result"]
                result[0]["cached"] = True

            return result

        return wrapper

    def __get(self, values):
        with pg_client.PostgresClient() as cur:
            sub_constraints = []
            for key, value in values.items():
                if value is not None:
                    sub_constraints.append(f"{key}=%({key})s")
                else:
                    sub_constraints.append(f"{key} IS NULL")
            query = f"""SELECT result,
                               (%(ttl)s>0
                                AND EXTRACT(EPOCH FROM (timezone('utc'::text, now()) - created_at - INTERVAL %(interval)s)) > 0) AS expired
                        FROM {self.table}
                        WHERE {" AND ".join(sub_constraints)}"""
            query = cur.mogrify(query, {**values, 'ttl': self.ttl, 'interval': f'{self.ttl} seconds'})
            logger.debug("------")
            logger.debug(query)
            logger.debug("------")
            cur.execute(query)
            result = cur.fetchone()
            return result

    def __add(self, values, result, execution_time):
        with pg_client.PostgresClient() as cur:
            query = f"""INSERT INTO {self.table} ({",".join(values.keys())},result,execution_time)
                        VALUES ({",".join([f"%({param})s" for param in values.keys()])},%(result)s,%(execution_time)s)
                        ON CONFLICT ({",".join(values.keys())}) DO UPDATE SET result=%(result)s,
                                                                              execution_time=%(execution_time)s,
                                                                              created_at=timezone('utc'::text, now());"""
            query = cur.mogrify(query, {**values,
                                        "result": json.dumps(jsonable_encoder(result)),
                                        "execution_time": execution_time})
            logger.debug("------")
            logger.debug(query)
            logger.debug("------")
            cur.execute(query)
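The decorator keys a Postgres cache table on the wrapped function's parameters, stores the JSON-encoded result with its execution time, and replays it until the TTL expires. A hypothetical application (the table name and function below are illustrative, not part of the diff; the backing table needs columns matching the parameter names plus result, execution_time and created_at):

from chalicelib.utils.or_cache import CachedResponse


@CachedResponse(table="public.project_stats_cache", ttl=300)   # hypothetical table, 5-minute TTL
def get_project_stats(project_id, start_ts=None, end_ts=None):
    # expensive aggregation goes here; the decorator expects a non-empty
    # list of dicts so it can set result[0]["cached"]
    return [{"projectId": project_id, "sessions": 42}]


rows = get_project_stats(1, start_ts=0, end_ts=1700000000)
print(rows[0]["cached"])   # False on the first call, True while the cached row is fresh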
@ -4,47 +4,48 @@ import schemas
|
|||
|
||||
|
||||
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
|
||||
if isinstance(op, Enum):
|
||||
op = op.value
|
||||
return {
|
||||
schemas.SearchEventOperator.IS: "=",
|
||||
schemas.SearchEventOperator.ON: "=",
|
||||
schemas.SearchEventOperator.ON_ANY: "IN",
|
||||
schemas.SearchEventOperator.IS_NOT: "!=",
|
||||
schemas.SearchEventOperator.NOT_ON: "!=",
|
||||
schemas.SearchEventOperator.CONTAINS: "ILIKE",
|
||||
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
|
||||
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
|
||||
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
|
||||
# this is not used as an operator, it is used in order to maintain a valid value for conditions
|
||||
schemas.SearchEventOperator.PATTERN: "regex",
|
||||
|
||||
schemas.SearchEventOperator.IS.value: "=",
|
||||
schemas.SearchEventOperator.ON.value: "=",
|
||||
schemas.SearchEventOperator.ON_ANY.value: "IN",
|
||||
schemas.SearchEventOperator.IS_NOT.value: "!=",
|
||||
schemas.SearchEventOperator.NOT_ON.value: "!=",
|
||||
schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
|
||||
schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
|
||||
schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
|
||||
schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
|
||||
# Selector operators:
|
||||
schemas.ClickEventExtraOperator.IS: "=",
|
||||
schemas.ClickEventExtraOperator.IS_NOT: "!=",
|
||||
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
|
||||
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.IS.value: "=",
|
||||
schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
|
||||
schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
|
||||
schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
|
||||
|
||||
schemas.MathOperator.GREATER: ">",
|
||||
schemas.MathOperator.GREATER_EQ: ">=",
|
||||
schemas.MathOperator.LESS: "<",
|
||||
schemas.MathOperator.LESS_EQ: "<=",
|
||||
schemas.MathOperator.GREATER.value: ">",
|
||||
schemas.MathOperator.GREATER_EQ.value: ">=",
|
||||
schemas.MathOperator.LESS.value: "<",
|
||||
schemas.MathOperator.LESS_EQ.value: "<=",
|
||||
}.get(op, "=")
|
||||
|
||||
|
||||
def is_negation_operator(op: schemas.SearchEventOperator):
|
||||
return op in [schemas.SearchEventOperator.IS_NOT,
|
||||
schemas.SearchEventOperator.NOT_ON,
|
||||
schemas.SearchEventOperator.NOT_CONTAINS,
|
||||
schemas.ClickEventExtraOperator.IS_NOT,
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS]
|
||||
if isinstance(op, Enum):
|
||||
op = op.value
|
||||
return op in [schemas.SearchEventOperator.IS_NOT.value,
|
||||
schemas.SearchEventOperator.NOT_ON.value,
|
||||
schemas.SearchEventOperator.NOT_CONTAINS.value,
|
||||
schemas.ClickEventExtraOperator.IS_NOT.value,
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
|
||||
|
||||
|
||||
def reverse_sql_operator(op):
|
||||
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
|
||||
|
||||
|
||||
def multi_conditions(condition, values, value_key="value", is_not=False) -> str:
|
||||
def multi_conditions(condition, values, value_key="value", is_not=False):
|
||||
query = []
|
||||
for i in range(len(values)):
|
||||
k = f"{value_key}_{i}"
|
||||
|
|
@ -52,16 +53,12 @@ def multi_conditions(condition, values, value_key="value", is_not=False) -> str:
|
|||
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
|
||||
|
||||
|
||||
def multi_values(values, value_key="value", data_type: schemas.PropertyType | None = None):
|
||||
def multi_values(values, value_key="value"):
|
||||
query_values = {}
|
||||
if values is not None and isinstance(values, list):
|
||||
for i in range(len(values)):
|
||||
k = f"{value_key}_{i}"
|
||||
query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i]
|
||||
if data_type:
|
||||
if data_type == schemas.PropertyType.STRING:
|
||||
query_values[k] = str(query_values[k])
|
||||
|
||||
return query_values
|
||||
|
||||
|
||||
|
|
@ -80,29 +77,3 @@ def single_value(values):
|
|||
values[i] = v.value
|
||||
return values
|
||||
|
||||
|
||||
def coordinate_conditions(condition_x, condition_y, values, value_key="value", is_not=False):
|
||||
query = []
|
||||
if len(values) == 2:
|
||||
# if 2 values are provided, it means x=v[0] and y=v[1]
|
||||
for i in range(len(values)):
|
||||
k = f"{value_key}_{i}"
|
||||
if i == 0:
|
||||
query.append(f"{condition_x}=%({k})s")
|
||||
elif i == 1:
|
||||
query.append(f"{condition_y}=%({k})s")
|
||||
|
||||
elif len(values) == 4:
|
||||
# if 4 values are provided, it means v[0]<=x<=v[1] and v[2]<=y<=v[3]
|
||||
for i in range(len(values)):
|
||||
k = f"{value_key}_{i}"
|
||||
if i == 0:
|
||||
query.append(f"{condition_x}>=%({k})s")
|
||||
elif i == 1:
|
||||
query.append(f"{condition_x}<=%({k})s")
|
||||
elif i == 2:
|
||||
query.append(f"{condition_y}>=%({k})s")
|
||||
elif i == 3:
|
||||
query.append(f"{condition_y}<=%({k})s")
|
||||
|
||||
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
|
||||
|
|
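In sql_helper, get_sql_operator translates schema operators into their SQL counterparts (defaulting to '='), is_negation_operator flags the NOT variants, and coordinate_conditions assembles the x/y condition string for click coordinates. A short sanity check, with results read off the mappings above:

import schemas
from chalicelib.utils import sql_helper as sh

sh.get_sql_operator(schemas.SearchEventOperator.NOT_CONTAINS)   # 'NOT ILIKE'
sh.get_sql_operator(schemas.MathOperator.GREATER_EQ)            # '>='
sh.is_negation_operator(schemas.SearchEventOperator.IS_NOT)     # True

sh.coordinate_conditions("click_x", "click_y", [250, 300], value_key="coord")
# -> '(click_x=%(coord_0)s OR click_y=%(coord_1)s)'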
|
|||
|
|
@ -74,6 +74,4 @@ EXP_CH_DRIVER=true
|
|||
EXP_AUTOCOMPLETE=true
|
||||
EXP_ALERTS=true
|
||||
EXP_ERRORS_SEARCH=true
|
||||
EXP_METRICS=true
|
||||
EXP_SESSIONS_SEARCH=true
|
||||
EXP_EVENTS=true
|
||||
EXP_METRICS=true
|
||||
|
|
@ -68,5 +68,4 @@ EXP_CH_DRIVER=true
|
|||
EXP_AUTOCOMPLETE=true
|
||||
EXP_ALERTS=true
|
||||
EXP_ERRORS_SEARCH=true
|
||||
EXP_METRICS=true
|
||||
EXP_EVENTS=true
|
||||
EXP_METRICS=true
|
||||
|
|
@ -1,16 +1,17 @@
|
|||
urllib3==2.4.0
|
||||
urllib3==2.3.0
|
||||
requests==2.32.3
|
||||
boto3==1.38.16
|
||||
boto3==1.36.12
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.9
|
||||
clickhouse-connect==0.8.17
|
||||
elasticsearch==9.0.1
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
jira==3.8.0
|
||||
cachetools==5.5.2
|
||||
cachetools==5.5.1
|
||||
|
||||
fastapi==0.115.12
|
||||
uvicorn[standard]==0.34.2
|
||||
fastapi==0.115.8
|
||||
uvicorn[standard]==0.34.0
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.11.4
|
||||
pydantic[email]==2.10.6
|
||||
apscheduler==3.11.0
|
||||
|
|
|
|||
|
|
@ -1,18 +1,19 @@
|
|||
urllib3==2.4.0
|
||||
urllib3==2.3.0
|
||||
requests==2.32.3
|
||||
boto3==1.38.16
|
||||
boto3==1.36.12
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.9
|
||||
clickhouse-connect==0.8.17
|
||||
elasticsearch==9.0.1
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
jira==3.8.0
|
||||
cachetools==5.5.2
|
||||
cachetools==5.5.1
|
||||
|
||||
fastapi==0.115.12
|
||||
uvicorn[standard]==0.34.2
|
||||
fastapi==0.115.8
|
||||
uvicorn[standard]==0.34.0
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.11.4
|
||||
pydantic[email]==2.10.6
|
||||
apscheduler==3.11.0
|
||||
|
||||
redis==6.1.0
|
||||
redis==5.2.1
|
||||
|
|
|
|||
|
|
@ -4,9 +4,8 @@ from decouple import config
|
|||
from fastapi import Depends, Body, BackgroundTasks
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, projects, metadata, reset_password, log_tools, \
|
||||
from chalicelib.core import events, projects, issues, metadata, reset_password, log_tools, \
|
||||
announcements, weekly_report, assist, mobile, tenants, boarding, notifications, webhook, users, saved_search, tags
|
||||
from chalicelib.core.issues import issues
|
||||
from chalicelib.core.sourcemaps import sourcemaps
|
||||
from chalicelib.core.metrics import custom_metrics
|
||||
from chalicelib.core.alerts import alerts
|
||||
|
|
|
|||
|
|
@ -8,14 +8,13 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
|
|||
|
||||
import schemas
|
||||
from chalicelib.core import assist, signup, feature_flags
|
||||
from chalicelib.core import notes
|
||||
from chalicelib.core import scope
|
||||
from chalicelib.core import tenants, users, projects, license
|
||||
from chalicelib.core import webhook
|
||||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.core.errors import errors, errors_details
|
||||
from chalicelib.core.metrics import heatmaps
|
||||
from chalicelib.core.sessions import sessions, sessions_replay, sessions_favorite, sessions_viewed, \
|
||||
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
|
||||
sessions_assignments, unprocessed_sessions, sessions_search
|
||||
from chalicelib.utils import captcha, smtp
|
||||
from chalicelib.utils import contextual_validators
|
||||
|
|
@ -260,7 +259,8 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
|
|||
def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
|
||||
Depends(contextual_validators.validate_contextual_payload),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id)
|
||||
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id,
|
||||
platform=context.project.platform)
|
||||
return {'data': data}
|
||||
|
||||
|
||||
|
|
@ -268,7 +268,8 @@ def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema =
|
|||
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
|
||||
Depends(contextual_validators.validate_contextual_payload),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True)
|
||||
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True,
|
||||
platform=context.project.platform)
|
||||
return {'data': data}
|
||||
|
||||
|
||||
|
|
@ -474,8 +475,8 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
|
|||
|
||||
@app.get('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
|
||||
def get_note_by_id(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
|
||||
user_id=context.user_id)
|
||||
data = sessions_notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
|
||||
user_id=context.user_id)
|
||||
if "errors" in data:
|
||||
return data
|
||||
return {
|
||||
|
|
@ -488,8 +489,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
|
|||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
|
||||
return {"errors": ["Session not found"]}
|
||||
data = notes.create(tenant_id=context.tenant_id, project_id=projectId,
|
||||
session_id=sessionId, user_id=context.user_id, data=data)
|
||||
data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
|
||||
session_id=sessionId, user_id=context.user_id, data=data)
|
||||
if "errors" in data.keys():
|
||||
return data
|
||||
return {
|
||||
|
|
@ -499,8 +500,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
|
|||
|
||||
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
|
||||
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
|
||||
session_id=sessionId, user_id=context.user_id)
|
||||
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
|
||||
session_id=sessionId, user_id=context.user_id)
|
||||
if "errors" in data:
|
||||
return data
|
||||
return {
|
||||
|
|
@ -511,8 +512,8 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
|
|||
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
|
||||
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, data=data)
|
||||
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, data=data)
|
||||
if "errors" in data.keys():
|
||||
return data
|
||||
return {
|
||||
|
|
@ -522,29 +523,29 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
|
|||
|
||||
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
|
||||
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = notes.delete(project_id=projectId, note_id=noteId)
|
||||
data = sessions_notes.delete(project_id=projectId, note_id=noteId)
|
||||
return data
|
||||
|
||||
|
||||
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
|
||||
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, webhook_id=webhookId)
|
||||
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, webhook_id=webhookId)
|
||||
|
||||
|
||||
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
|
||||
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, webhook_id=webhookId)
|
||||
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
|
||||
note_id=noteId, webhook_id=webhookId)
|
||||
|
||||
|
||||
@app.post('/{projectId}/notes', tags=["sessions", "notes"])
|
||||
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
|
||||
user_id=context.user_id, data=data)
|
||||
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
|
||||
user_id=context.user_id, data=data)
|
||||
if "errors" in data:
|
||||
return data
|
||||
return {'data': data}
|
||||
|
|
|
|||
|
|
@ -219,17 +219,6 @@ def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSch
|
|||
return {"data": data}
|
||||
|
||||
|
||||
@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}/chart", tags=["card"])
|
||||
@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}", tags=["card"])
|
||||
def get_card_chart_for_dashboard(projectId: int, dashboardId: int, metric_id: int,
|
||||
data: schemas.CardSessionsSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = custom_metrics.make_chart_from_card(
|
||||
project=context.project, user_id=context.user_id, metric_id=metric_id, data=data, for_dashboard=True
|
||||
)
|
||||
return {"data": data}
|
||||
|
||||
|
||||
@app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"])
|
||||
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
|
|
|
|||
|
|
@ -1,77 +0,0 @@
|
|||
from typing import Annotated
|
||||
|
||||
from fastapi import Body, Depends, Query
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core.product_analytics import events, properties, autocomplete
|
||||
from or_dependencies import OR_context
|
||||
from routers.base import get_routers
|
||||
from typing import Optional
|
||||
|
||||
public_app, app, app_apikey = get_routers()
|
||||
|
||||
|
||||
@app.get('/{projectId}/filters', tags=["product_analytics"])
|
||||
def get_all_filters(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {
|
||||
"data": {
|
||||
"events": events.get_events(project_id=projectId, page=filter_query),
|
||||
"filters": properties.get_all_properties(project_id=projectId, page=filter_query),
|
||||
"metadata": metadata.get_for_filters(project_id=projectId)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@app.get('/{projectId}/events/names', tags=["product_analytics"])
|
||||
def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": events.get_events(project_id=projectId, page=filter_query)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/properties/search', tags=["product_analytics"])
|
||||
def get_event_properties(projectId: int, event_name: str = None,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
if not event_name or len(event_name) == 0:
|
||||
return {"data": []}
|
||||
return {"data": properties.get_event_properties(project_id=projectId, event_name=event_name)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/events/search', tags=["product_analytics"])
|
||||
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": events.search_events(project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
|
||||
def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
|
||||
def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/events/autocomplete', tags=["autocomplete"])
|
||||
def autocomplete_events(projectId: int, q: Optional[str] = None,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": autocomplete.search_events(project_id=projectId, q=None if not q or len(q) == 0 else q)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/properties/autocomplete', tags=["autocomplete"])
|
||||
def autocomplete_properties(projectId: int, propertyName: Optional[str] = None, eventName: Optional[str] = None,
|
||||
q: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
if not propertyName and not eventName and not q:
|
||||
return {"error": ["Specify eventName to get top properties",
|
||||
"Specify propertyName to get top values of that property",
|
||||
"Specify eventName&propertyName to get top values of that property for the selected event"]}
|
||||
return {"data": autocomplete.search_properties(project_id=projectId,
|
||||
event_name=None if not eventName \
|
||||
or len(eventName) == 0 else eventName,
|
||||
property_name=None if not propertyName \
|
||||
or len(propertyName) == 0 else propertyName,
|
||||
q=None if not q or len(q) == 0 else q)}
|
||||
15  api/routers/subs/product_anaytics.py  Normal file
@ -0,0 +1,15 @@
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers


public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
                  # data: schemas.CreateDashboardSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return product_anaytics2.search_events(project_id=projectId, data={})
|
|
@ -1,12 +1,10 @@
|
|||
from typing import Annotated
|
||||
from fastapi import Body, Depends
|
||||
|
||||
from fastapi import Body, Depends, Query
|
||||
|
||||
import schemas
|
||||
from chalicelib.core.usability_testing import service
|
||||
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
|
||||
from or_dependencies import OR_context
|
||||
from routers.base import get_routers
|
||||
from schemas import schemas
|
||||
|
||||
public_app, app, app_apikey = get_routers()
|
||||
tags = ["usability-tests"]
|
||||
|
|
@ -79,7 +77,9 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate
|
|||
|
||||
|
||||
@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
|
||||
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.UsabilityTestQuery, Query()]):
|
||||
async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
|
||||
live: bool = False,
|
||||
user_id: str = None):
|
||||
"""
|
||||
Get sessions related to a specific UT test.
|
||||
|
||||
|
|
@ -87,23 +87,21 @@ async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[sch
|
|||
- **test_id**: The unique identifier of the UT test.
|
||||
"""
|
||||
|
||||
if filter_query.live:
|
||||
return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
|
||||
if live:
|
||||
return service.ut_tests_sessions_live(projectId, test_id, page, limit)
|
||||
else:
|
||||
return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit,
|
||||
filter_query.user_id, filter_query.live)
|
||||
return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
|
||||
|
||||
|
||||
@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
|
||||
async def get_responses(projectId: int, test_id: int, task_id: int,
|
||||
filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
|
||||
async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
|
||||
"""
|
||||
Get responses related to a specific UT test.
|
||||
|
||||
- **project_id**: The unique identifier of the project.
|
||||
- **test_id**: The unique identifier of the UT test.
|
||||
"""
|
||||
return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query)
|
||||
return service.get_responses(test_id, task_id, page, limit, query)
|
||||
|
||||
|
||||
@app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)
|
||||
|
|
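One side of this diff binds the sessions listing's query parameters through a single UsabilityTestQuery model declared with Query(), the other keeps loose page/limit/live/user_id arguments. A sketch of how the model-based variant parses a request, assuming _PaginatedSchema supplies page and limit as the handler uses them; values are illustrative:

# GET /42/usability-tests/7/sessions?page=2&limit=20&live=true&user_id=u-1001
filter_query = schemas.UsabilityTestQuery(page=2, limit=20, live=True, user_id="u-1001")

if filter_query.live:
    result = service.ut_tests_sessions_live(42, 7, filter_query.page, filter_query.limit)
else:
    result = service.ut_tests_sessions(42, 7, filter_query.page, filter_query.limit,
                                       filter_query.user_id, filter_query.live)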
|
|||
|
|
@ -1,4 +1,2 @@
|
|||
from .schemas import *
|
||||
from .product_analytics import *
|
||||
from . import overrides as _overrides
|
||||
from .schemas import _PaginatedSchema as PaginatedSchema
|
||||
|
|
|
|||
|
|
@ -1,22 +0,0 @@
|
|||
from typing import Optional, List, Literal, Union, Annotated
|
||||
from pydantic import Field
|
||||
|
||||
from .overrides import BaseModel
|
||||
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
|
||||
_PaginatedSchema, PropertyFilterSchema
|
||||
|
||||
|
||||
class EventSearchSchema(BaseModel):
|
||||
is_event: Literal[True] = True
|
||||
name: str = Field(...)
|
||||
properties: Optional[EventPropertiesSchema] = Field(default=None)
|
||||
|
||||
|
||||
ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
|
||||
Field(discriminator='is_event')]
|
||||
|
||||
|
||||
class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
||||
filters: List[ProductAnalyticsGroupedFilter] = Field(...)
|
||||
sort: str = Field(default="startTs")
|
||||
order: SortOrderType = Field(default=SortOrderType.DESC)
|
||||
|
|
@ -3,13 +3,12 @@ from typing import Optional, List, Union, Literal
|
|||
|
||||
from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl
|
||||
from pydantic import field_validator, model_validator, computed_field
|
||||
from pydantic import AfterValidator
|
||||
from pydantic.functional_validators import BeforeValidator
|
||||
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from .overrides import BaseModel, Enum, ORUnion
|
||||
from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
|
||||
force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric, check_regex
|
||||
force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
|
||||
|
||||
|
||||
class _GRecaptcha(BaseModel):
|
||||
|
|
@ -405,9 +404,6 @@ class EventType(str, Enum):
|
|||
REQUEST_MOBILE = "requestMobile"
|
||||
ERROR_MOBILE = "errorMobile"
|
||||
SWIPE_MOBILE = "swipeMobile"
|
||||
EVENT = "event"
|
||||
INCIDENT = "incident"
|
||||
CLICK_COORDINATES = "clickCoordinates"
|
||||
|
||||
|
||||
class PerformanceEventType(str, Enum):
|
||||
|
|
@ -468,7 +464,6 @@ class SearchEventOperator(str, Enum):
|
|||
NOT_CONTAINS = "notContains"
|
||||
STARTS_WITH = "startsWith"
|
||||
ENDS_WITH = "endsWith"
|
||||
PATTERN = "regex"
|
||||
|
||||
|
||||
class ClickEventExtraOperator(str, Enum):
|
||||
|
|
@ -508,8 +503,8 @@ class IssueType(str, Enum):
|
|||
CUSTOM = 'custom'
|
||||
JS_EXCEPTION = 'js_exception'
|
||||
MOUSE_THRASHING = 'mouse_thrashing'
|
||||
TAP_RAGE = 'tap_rage' # IOS
|
||||
INCIDENT = 'incident'
|
||||
# IOS
|
||||
TAP_RAGE = 'tap_rage'
|
||||
|
||||
|
||||
class MetricFormatType(str, Enum):
|
||||
|
|
@ -540,7 +535,7 @@ class GraphqlFilterType(str, Enum):
|
|||
class RequestGraphqlFilterSchema(BaseModel):
|
||||
type: Union[FetchFilterType, GraphqlFilterType] = Field(...)
|
||||
value: List[Union[int, str]] = Field(...)
|
||||
operator: Annotated[Union[SearchEventOperator, MathOperator], AfterValidator(check_regex)] = Field(...)
|
||||
operator: Union[SearchEventOperator, MathOperator] = Field(...)
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
|
|
@ -550,85 +545,7 @@ class RequestGraphqlFilterSchema(BaseModel):
|
|||
return values
|
||||
|
||||
|
||||
class EventPredefinedPropertyType(str, Enum):
|
||||
TIME = "$time"
|
||||
SOURCE = "$source"
|
||||
DURATION_S = "$duration_s"
|
||||
DESCRIPTION = "description"
|
||||
AUTO_CAPTURED = "$auto_captured"
|
||||
SDK_EDITION = "$sdk_edition"
|
||||
SDK_VERSION = "$sdk_version"
|
||||
DEVICE_ID = "$device_id"
|
||||
OS = "$os"
|
||||
OS_VERSION = "$os_version"
|
||||
BROWSER = "$browser"
|
||||
BROWSER_VERSION = "$browser_version"
|
||||
DEVICE = "$device"
|
||||
SCREEN_HEIGHT = "$screen_height"
|
||||
SCREEN_WIDTH = "$screen_width"
|
||||
CURRENT_URL = "$current_url"
|
||||
INITIAL_REFERRER = "$initial_referrer"
|
||||
REFERRING_DOMAIN = "$referring_domain"
|
||||
REFERRER = "$referrer"
|
||||
INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
|
||||
SEARCH_ENGINE = "$search_engine"
|
||||
SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
|
||||
UTM_SOURCE = "utm_source"
|
||||
UTM_MEDIUM = "utm_medium"
|
||||
UTM_CAMPAIGN = "utm_campaign"
|
||||
COUNTRY = "$country"
|
||||
STATE = "$state"
|
||||
CITY = "$city"
|
||||
ISSUE_TYPE = "issue_type"
|
||||
TAGS = "$tags"
|
||||
IMPORT = "$import"
|
||||
|
||||
|
||||
class PropertyType(str, Enum):
|
||||
INT = "int"
|
||||
FLOAT = "float"
|
||||
DATETIME = "datetime"
|
||||
STRING = "string"
|
||||
ARRAY = "array"
|
||||
TUPLE = "tuple"
|
||||
MAP = "map"
|
||||
NESTED = "nested"
|
||||
|
||||
|
||||
class PropertyFilterSchema(BaseModel):
|
||||
is_event: Literal[False] = False
|
||||
name: Union[EventPredefinedPropertyType, str] = Field(...)
|
||||
operator: Union[SearchEventOperator, MathOperator] = Field(...)
|
||||
value: List[Union[int, str]] = Field(...)
|
||||
data_type: PropertyType = Field(default=PropertyType.STRING.value)
|
||||
|
||||
# property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
|
||||
|
||||
@computed_field
|
||||
@property
|
||||
def is_predefined(self) -> bool:
|
||||
return EventPredefinedPropertyType.has_value(self.name)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def transform_name(self):
|
||||
if isinstance(self.name, Enum):
|
||||
self.name = self.name.value
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class EventPropertiesSchema(BaseModel):
|
||||
operator: Literal["and", "or"] = Field(...)
|
||||
filters: List[PropertyFilterSchema] = Field(...)
|
||||
|
||||
|
||||
class SessionSearchEventSchema(BaseModel):
|
||||
class SessionSearchEventSchema2(BaseModel):
|
||||
is_event: Literal[True] = True
|
||||
value: List[Union[str, int]] = Field(...)
|
||||
type: Union[EventType, PerformanceEventType] = Field(...)
|
||||
|
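One side of this hunk defines PropertyFilterSchema, which pairs a property name (a $-prefixed predefined property or a free-form one) with an operator, a value list, a data_type hint, and a regex check when the operator is PATTERN. A construction sketch (values are illustrative, and has_value is assumed to be the enum helper used elsewhere in these schemas):

f = schemas.PropertyFilterSchema(name="$browser",
                                 operator=schemas.SearchEventOperator.IS,
                                 value=["Chrome"],
                                 data_type=schemas.PropertyType.STRING)
f.is_predefined   # True: "$browser" is an EventPredefinedPropertyType value
f.is_event        # False, which is what the is_event discriminator keys on

schemas.PropertyFilterSchema(name="$current_url",
                             operator=schemas.SearchEventOperator.PATTERN,
                             value=["^/checkout/("])   # fails validation: the regex does not compile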
|
@ -636,7 +553,6 @@ class SessionSearchEventSchema(BaseModel):
|
|||
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
|
||||
sourceOperator: Optional[MathOperator] = Field(default=None)
|
||||
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
|
||||
properties: Optional[EventPropertiesSchema] = Field(default=None)
|
||||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
|
||||
|
|
@ -661,23 +577,12 @@ class SessionSearchEventSchema(BaseModel):
|
|||
elif self.type == EventType.GRAPHQL:
|
||||
assert isinstance(self.filters, List) and len(self.filters) > 0, \
|
||||
f"filters should be defined for {EventType.GRAPHQL}"
|
||||
elif self.type == EventType.CLICK_COORDINATES:
|
||||
assert isinstance(self.value, List) \
|
||||
and (len(self.value) == 0 or len(self.value) == 2 or len(self.value) == 4), \
|
||||
f"value should be [x,y] or [x1,x2,y1,y2] for {EventType.CLICK_COORDINATES}"
|
||||
|
||||
if isinstance(self.operator, ClickEventExtraOperator):
|
||||
assert self.type == EventType.CLICK, \
|
||||
f"operator:{self.operator} is only available for event-type: {EventType.CLICK}"
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class SessionSearchFilterSchema(BaseModel):
|
||||
is_event: Literal[False] = False
|
||||
|
|
@ -735,13 +640,6 @@ class SessionSearchFilterSchema(BaseModel):
|
|||
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _PaginatedSchema(BaseModel):
|
||||
limit: int = Field(default=200, gt=0, le=200)
|
||||
|
|
@ -762,12 +660,12 @@ def add_missing_is_event(values: dict):
|
|||
|
||||
|
||||
# this type is created to allow mixing events&filters and specifying a discriminator
|
||||
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema],
|
||||
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
|
||||
Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]
|
||||
|
||||
|
||||
class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
||||
events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True)
|
||||
events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
|
||||
filters: List[GroupedFilterType] = Field(default_factory=list)
|
||||
sort: str = Field(default="startTs")
|
||||
order: SortOrderType = Field(default=SortOrderType.DESC)
|
||||
|
|
@ -792,8 +690,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
|||
def add_missing_attributes(cls, values):
|
||||
# in case isEvent is wrong:
|
||||
for f in values.get("filters") or []:
|
||||
if f.get("type") is None:
|
||||
continue
|
||||
if EventType.has_value(f["type"]) and not f.get("isEvent"):
|
||||
f["isEvent"] = True
|
||||
elif FilterType.has_value(f["type"]) and f.get("isEvent"):
|
||||
|
|
@ -819,15 +715,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
|||
f["value"] = vals
|
||||
return values
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_pa_event_filter(self):
|
||||
for v in self.filters + self.events:
|
||||
if v.type == EventType.EVENT:
|
||||
assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
|
||||
"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
|
||||
assert len(v.value) == 1, "value must have 1 single value for EVENT type"
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def split_filters_events(self):
|
||||
n_filters = []
|
||||
|
|
@ -908,13 +795,6 @@ class PathAnalysisSubFilterSchema(BaseModel):
|
|||
values["isEvent"] = True
|
||||
return values
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _ProductAnalyticsFilter(BaseModel):
|
||||
is_event: Literal[False] = False
|
||||
|
|
@ -925,13 +805,6 @@ class _ProductAnalyticsFilter(BaseModel):
|
|||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class _ProductAnalyticsEventFilter(BaseModel):
|
||||
is_event: Literal[True] = True
|
||||
|
|
@ -942,13 +815,6 @@ class _ProductAnalyticsEventFilter(BaseModel):
|
|||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
# this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
|
||||
ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter],
|
||||
|
|
@ -1393,13 +1259,6 @@ class LiveSessionSearchFilterSchema(BaseModel):
|
|||
assert len(self.source) > 0, "source should not be empty for METADATA type"
|
||||
return self
|
||||
|
||||
@model_validator(mode='after')
|
||||
def _check_regex_value(self):
|
||||
if self.operator == SearchEventOperator.PATTERN:
|
||||
for v in self.value:
|
||||
check_regex(v)
|
||||
return self
|
||||
|
||||
|
||||
class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
|
||||
filters: List[LiveSessionSearchFilterSchema] = Field([])
|
||||
|
|
@ -1525,8 +1384,8 @@ class MetricSearchSchema(_PaginatedSchema):
|
|||
mine_only: bool = Field(default=False)
|
||||
|
||||
|
||||
class _HeatMapSearchEventRaw(SessionSearchEventSchema):
|
||||
type: Literal[EventType.LOCATION, EventType.CLICK_COORDINATES] = Field(...)
|
||||
class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
|
||||
type: Literal[EventType.LOCATION] = Field(...)
|
||||
|
||||
|
||||
class HeatMapSessionsSearch(SessionsSearchPayloadSchema):
|
||||
|
|
@ -1650,34 +1509,3 @@ class TagCreate(TagUpdate):
|
|||
|
||||
class ScopeSchema(BaseModel):
|
||||
scope: int = Field(default=1, ge=1, le=2)
|
||||
|
||||
|
||||
class SessionModel(BaseModel):
|
||||
duration: int
|
||||
errorsCount: int
|
||||
eventsCount: int
|
||||
issueScore: int
|
||||
issueTypes: List[IssueType] = Field(default=[])
|
||||
metadata: dict = Field(default={})
|
||||
pagesCount: int
|
||||
platform: str
|
||||
projectId: int
|
||||
sessionId: str
|
||||
startTs: int
|
||||
timezone: Optional[str]
|
||||
userAnonymousId: Optional[str]
|
||||
userBrowser: str
|
||||
userCity: str
|
||||
userCountry: str
|
||||
userDevice: Optional[str]
|
||||
userDeviceType: str
|
||||
userId: Optional[str]
|
||||
userOs: str
|
||||
userState: str
|
||||
userUuid: str
|
||||
viewed: bool = Field(default=False)
|
||||
|
||||
|
||||
class UsabilityTestQuery(_PaginatedSchema):
|
||||
live: bool = Field(default=False)
|
||||
user_id: Optional[str] = Field(default=None)
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
import re
|
||||
from typing import Union, Any, Type
|
||||
|
||||
from pydantic import ValidationInfo
|
||||
|
||||
from .overrides import Enum
|
||||
|
||||
NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#,']*$"
|
||||
NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#]*$"
|
||||
|
||||
|
||||
def transform_email(email: str) -> str:
|
||||
|
|
@ -58,17 +57,3 @@ def check_alphanumeric(v: str, info: ValidationInfo) -> str:
|
|||
is_alphanumeric = v.replace(' ', '').isalnum()
|
||||
assert is_alphanumeric, f'{info.field_name} must be alphanumeric'
|
||||
return v
|
||||
|
||||
|
||||
def check_regex(v: str) -> str:
|
||||
assert v is not None, "Regex is null"
|
||||
assert isinstance(v, str), "Regex value must be a string"
|
||||
assert len(v) > 0, "Regex is empty"
|
||||
is_valid = None
|
||||
try:
|
||||
re.compile(v)
|
||||
except re.error as exc:
|
||||
is_valid = f"Invalid regex: {exc} (at position {exc.pos})"
|
||||
|
||||
assert is_valid is None, is_valid
|
||||
return v
|
||||
|
|
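check_regex, listed above, asserts that a value is a non-empty string that compiles as a regular expression and surfaces the re.error message otherwise; the PATTERN validators call it once per value. For example:

check_regex(r"^/product/\d+$")   # returns the pattern unchanged
check_regex("([")                # AssertionError: Invalid regex: ... (at position 0)
check_regex("")                  # AssertionError: Regex is empty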
|
|||
|
|
@@ -1,61 +0,0 @@
#!/bin/bash

# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>

ARCH=${ARCH:-amd64}
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-git_sha}
check_prereq() {
    which docker || {
        echo "Docker not installed, please install docker."
        exit 1
    }
}
source ../scripts/lib/_docker.sh

[[ $PATCH -eq 1 ]] && {
    image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
    image_tag="${image_tag}-ee"
}
update_helm_release() {
    chart=$1
    HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
    # Update the chart version
    sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
    # Update image tags
    sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
    # Commit the changes
    git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
    git commit -m "chore(helm): Updating $chart image release"
}

function build_api() {
    destination="_assist-server_ee"
    [[ -d ../${destination} ]] && {
        echo "Removing previous build cache"
        rm -rf ../${destination}
    }
    cp -R ../assist-server ../${destination}
    cd ../${destination} || exit 1
    cp -rf ../ee/assist-server/* ./

    docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist-server:${image_tag} .

    cd ../assist-server || exit 1
    rm -rf ../${destination}
    [[ $PUSH_IMAGE -eq 1 ]] && {
        docker push ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
        docker tag ${DOCKER_REPO:-'local'}/assist-server:${image_tag} ${DOCKER_REPO:-'local'}/assist-server:latest
        docker push ${DOCKER_REPO:-'local'}/assist-server:latest
    }
    [[ $SIGN_IMAGE -eq 1 ]] && {
        cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
    }
    echo "build completed for assist-server"
}

check_prereq
build_api $1
if [[ $PATCH -eq 1 ]]; then
    update_helm_release assist-server
fi
|
@@ -1,30 +0,0 @@
ee ?= "false" # true to build ee
app ?= "" # app name, default all
arch ?= "amd64" # default amd64
docker_runtime ?= "docker" # default docker runtime

.PHONY: help
help: ## Prints help for targets with comments
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

##@ Docker

.PHONY: build
build: ## Build the backend. ee=true for ee build. app=app name for only one app. Default build all apps.
	ARCH=$(arch) DOCKER_RUNTIME=$(docker_runtime) bash build.sh $(ee) $(app)

##@ Local Dev

.PHONY: scan
scan: ## Scan the backend
	@trivy fs -q .

.PHONY: update
update: ## Update the backend dependecies
	@echo Updating dependencies
	@go get -u -v ./...
	@go mod tidy

run: ## Run the backend. app=app name for app to run
	@if [ $(app) == "" ]; then echo "Error: app parameter is required. Usage: make run app=<app_name>"; exit 1; fi
	@go run "cmd/$(app)/main.go"
|
@@ -2,14 +2,12 @@ package main

import (
    "context"

    analyticsConfig "openreplay/backend/internal/config/analytics"
    "openreplay/backend/pkg/analytics"
    "openreplay/backend/pkg/analytics/db"
    "openreplay/backend/pkg/db/postgres/pool"
    "openreplay/backend/pkg/logger"
    "openreplay/backend/pkg/metrics"
    //analyticsMetrics "openreplay/backend/pkg/metrics/analytics"
    //databaseMetrics "openreplay/backend/pkg/metrics/database"
    "openreplay/backend/pkg/metrics/database"
    "openreplay/backend/pkg/metrics/web"
    "openreplay/backend/pkg/server"

@@ -20,6 +18,7 @@ func main() {
    ctx := context.Background()
    log := logger.New()
    cfg := analyticsConfig.New(log)
    // Observability
    webMetrics := web.New("analytics")
    dbMetrics := database.New("analytics")
    metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))

@@ -30,13 +29,7 @@ func main() {
    }
    defer pgConn.Close()

    chConn, err := db.NewConnector(cfg.Clickhouse)
    if err != nil {
        log.Fatal(ctx, "can't init clickhouse connection: %s", err)
    }
    defer chConn.Stop()

    builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn, chConn)
    builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
    if err != nil {
        log.Fatal(ctx, "can't init services: %s", err)
    }
|
|
|||
|
|
@@ -66,7 +66,7 @@ func main() {
        messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
        messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
        messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue,
        messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
        messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
        messages.MsgMouseClickDeprecated, messages.MsgSetPageLocation, messages.MsgSetPageLocationDeprecated,
        messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
        messages.MsgPageEvent, messages.MsgPageEventDeprecated, messages.MsgMouseThrashing, messages.MsgInputChange,
|
|
|||
|
|
@@ -100,7 +100,6 @@ func main() {
        // Process assets
        if msg.TypeID() == messages.MsgSetNodeAttributeURLBased ||
            msg.TypeID() == messages.MsgSetCSSDataURLBased ||
            msg.TypeID() == messages.MsgCSSInsertRuleURLBased ||
            msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased ||
            msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased {
            m := msg.Decode()
|
|
|||
|
|
@ -1,54 +1,52 @@
|
|||
module openreplay/backend
|
||||
|
||||
go 1.23.0
|
||||
|
||||
toolchain go1.23.1
|
||||
go 1.23
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0
|
||||
github.com/Masterminds/semver v1.5.0
|
||||
github.com/andybalholm/brotli v1.1.1
|
||||
github.com/aws/aws-sdk-go v1.55.6
|
||||
github.com/btcsuite/btcutil v1.0.2
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0
|
||||
github.com/docker/distribution v2.8.3+incompatible
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0
|
||||
github.com/getsentry/sentry-go v0.32.0
|
||||
github.com/go-playground/validator/v10 v10.26.0
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0
|
||||
github.com/getsentry/sentry-go v0.31.1
|
||||
github.com/go-playground/validator/v10 v10.24.0
|
||||
github.com/go-redis/redis v6.15.9+incompatible
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/jackc/pgconn v1.14.3
|
||||
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438
|
||||
github.com/jackc/pgtype v1.14.4
|
||||
github.com/jackc/pgx/v4 v4.18.3
|
||||
github.com/klauspost/compress v1.18.0
|
||||
github.com/klauspost/compress v1.17.11
|
||||
github.com/klauspost/pgzip v1.2.6
|
||||
github.com/lib/pq v1.10.9
|
||||
github.com/oschwald/maxminddb-golang v1.13.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/prometheus/client_golang v1.22.0
|
||||
github.com/prometheus/client_golang v1.20.5
|
||||
github.com/rs/xid v1.6.0
|
||||
github.com/sethvargo/go-envconfig v1.2.0
|
||||
github.com/sethvargo/go-envconfig v1.1.0
|
||||
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/net v0.39.0
|
||||
golang.org/x/net v0.35.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/ClickHouse/ch-go v0.65.1 // indirect
|
||||
github.com/DataDog/zstd v1.5.7 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||
github.com/ClickHouse/ch-go v0.65.0 // indirect
|
||||
github.com/DataDog/zstd v1.5.6 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
|
|
@ -68,23 +66,23 @@ require (
|
|||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.63.0 // indirect
|
||||
github.com/prometheus/procfs v0.16.0 // indirect
|
||||
github.com/prometheus/client_model v0.6.1 // indirect
|
||||
github.com/prometheus/common v0.62.0 // indirect
|
||||
github.com/prometheus/procfs v0.15.1 // indirect
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
golang.org/x/crypto v0.37.0 // indirect
|
||||
golang.org/x/oauth2 v0.29.0 // indirect
|
||||
golang.org/x/sys v0.32.0 // indirect
|
||||
golang.org/x/text v0.24.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
golang.org/x/crypto v0.33.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.22.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 // indirect
|
||||
google.golang.org/protobuf v1.36.4 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
|
|
|||
|
|
@ -6,17 +6,10 @@ github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkk
|
|||
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0 h1:Bg8m3nq/X1DeePkAbCfb6ml6F3F0IunEhE8TMh+lY48=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.0/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 h1:UXT0o77lXQrikd1kgwIPQOUect7EoR/+sbP4wQKdzxM=
|
||||
|
|
@ -25,28 +18,19 @@ github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOEl
|
|||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
|
||||
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
|
||||
github.com/ClickHouse/ch-go v0.65.0 h1:vZAXfTQliuNNefqkPDewX3kgRxN6Q4vUENnnY+ynTRY=
|
||||
github.com/ClickHouse/ch-go v0.65.0/go.mod h1:tCM0XEH5oWngoi9Iu/8+tjPBo04I/FxNIffpdjtwx3k=
|
||||
github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU=
|
||||
github.com/ClickHouse/ch-go v0.65.1/go.mod h1:bsodgURwmrkvkBe5jw1qnGDgyITsYErfONKAHn05nv4=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.1 h1:Dy0n0l+cMbPXs8hFkeeWGaPKrB+MDByUNQBSmRO3W6k=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.1/go.mod h1:szk8BMoQV/NgHXZ20ZbwDyvPWmpfhRKjFkc6wzASGxM=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1 h1:RLhkxA6iH/bLTXeDtEj/u4yUx9Q03Y95P+cjHScQK78=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.32.1/go.mod h1:YtaiIFlHCGNPbOpAvFGYobtcVnmgYvD/WmzitixxWYc=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0 h1:Y4rqkdrRHgExvC4o/NTbLdY5LFQ3LHS77/RNFxFX3Co=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.34.0/go.mod h1:yioSINoRLVZkLyDzdMXPLRIqhDvel8iLBlwh6Iefso8=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0 h1:0VVmv8uZg8vdBuEpiF2nBGUezl2QITrxdEsLgh38j8M=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.34.0/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1 h1:weZhrGMO//sMEoSKWngoSQwMp4zBSlEX4p3/YWy9ltw=
|
||||
github.com/DataDog/datadog-api-client-go/v2 v2.37.1/go.mod h1:d3tOEgUd2kfsr9uuHQdY+nXrWp4uikgTgVCPdKNK30U=
|
||||
github.com/DataDog/zstd v1.5.6 h1:LbEglqepa/ipmmQJUDnSsfvA8e8IStVcGaFWDuxvGOY=
|
||||
github.com/DataDog/zstd v1.5.6/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/DataDog/zstd v1.5.7 h1:ybO8RBeh29qrxIhCA9E8gKY6xfONU9T6G6aP9DTKfLE=
|
||||
github.com/DataDog/zstd v1.5.7/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
|
||||
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
|
||||
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
|
||||
|
|
@ -113,8 +97,6 @@ github.com/compose-spec/compose-go/v2 v2.1.3 h1:bD67uqLuL/XgkAK6ir3xZvNLFPxPScEi
|
|||
github.com/compose-spec/compose-go/v2 v2.1.3/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0 h1:0HlcSNWg4LpLA9nIjzUMIqWHI+w0S68UN7alXAc3TeA=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.8.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0 h1:TK5CH5RbIj/aVfmJFEsDUT6vD2izac2zmA5BUfAOxC0=
|
||||
github.com/confluentinc/confluent-kafka-go/v2 v2.10.0/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
|
||||
github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
|
||||
github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
|
||||
github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=
|
||||
|
|
@ -166,14 +148,10 @@ github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJ
|
|||
github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0 h1:Y2S/FBjx1LlCv5m6pWAF2kDJAHoSjSRSJCApolgfthA=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.6.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0 h1:OgTneVuXP2uip4BA658Xi6Hfw+PeIOod2rY3GVMGoVE=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.7.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10 h1:TCQ8i4PmIJuBunvBS6bwT2ybzVFxxUhhltAs3Gyu1yo=
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.10/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0 h1:e9cWksE/Fr7urDRmGPGp47Nsp4/mvNOrU8As1l2HQQ0=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.17.0/go.mod h1:lGMlgKIbYoRvay3xWBeKahAiJOgmFDsjZC39nmO3H64=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0 h1:ANNq1h7DEiPUaALb8+5w3baQzaS08WfHV0DNzp0VG4M=
|
||||
github.com/elastic/go-elasticsearch/v8 v8.18.0/go.mod h1:WLqwXsJmQoYkoA9JBFeEwPkQhCfAZuUvfpdU/NvSSf0=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
|
|
@ -185,12 +163,8 @@ github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM4
|
|||
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
||||
github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4=
|
||||
github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
|
||||
github.com/getsentry/sentry-go v0.32.0 h1:YKs+//QmwE3DcYtfKRH8/KyOOF/I6Qnx7qYGNHCGmCY=
|
||||
github.com/getsentry/sentry-go v0.32.0/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY=
|
||||
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
|
|
@ -220,8 +194,6 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
|||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
|
||||
github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
|
||||
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
|
||||
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
|
||||
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
|
|
@ -239,8 +211,6 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
|||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
|
|
@ -252,7 +222,6 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
|||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
|
|
@ -359,8 +328,6 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6
|
|||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
|
||||
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
|
|
@ -474,20 +441,12 @@ github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF
|
|||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
|
||||
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
|
||||
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
|
||||
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
|
||||
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
|
||||
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
|
||||
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
|
||||
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
|
||||
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
|
||||
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
|
||||
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM=
|
||||
github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg=
|
||||
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o=
|
||||
github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
|
|
@ -509,8 +468,6 @@ github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQ
|
|||
github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
|
||||
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
|
||||
github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/sethvargo/go-envconfig v1.2.0 h1:q3XkOZWkC+G1sMLCrw9oPGTjYexygLOXDmGUit1ti8Q=
|
||||
github.com/sethvargo/go-envconfig v1.2.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
|
||||
github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
|
||||
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
|
||||
|
|
@ -571,8 +528,6 @@ github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Q
|
|||
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff h1:NwMEGwb7JJ8wPjT8OPKP5hO1Xz6AQ7Z00+GLSJfW21s=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250126222208-a52596c19dff/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc h1:reH9QQKGFOq39MYOvU9+SYrB8uzXtWNo51fWK3g0gGc=
|
||||
github.com/ua-parser/uap-go v0.0.0-20250326155420-f7f5a2f9f5bc/go.mod h1:gwANdYmo9R8LLwGnyDFWK2PMsaXXX2HhAvCnb/UhZsM=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
|
||||
|
|
@ -602,8 +557,6 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u
|
|||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
|
||||
go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
|
||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=
|
||||
|
|
@ -618,16 +571,12 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkE
|
|||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
|
||||
go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
|
||||
go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
|
||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
||||
go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
|
||||
go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
|
||||
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
|
||||
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
|
||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
||||
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
|
||||
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
|
||||
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
|
|
@ -668,10 +617,6 @@ golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
|
|||
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
|
||||
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
||||
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
|
||||
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
|
|
@ -698,14 +643,8 @@ golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
|||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
|
||||
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
||||
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98=
|
||||
golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
|
|
@ -740,10 +679,6 @@ golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
|||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
||||
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
|
|
@ -765,10 +700,6 @@ golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
|||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
|
||||
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
|
||||
golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
|
||||
golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
|
|
@ -796,18 +727,12 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:
|
|||
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287 h1:J1H9f+LEdWAfHcez/4cvaVBox7cOYT+IU6rgqj5x++8=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250127172529-29210b9bc287/go.mod h1:8BS3B93F/U1juMFq9+EDk+qOT5CO1R9IzXxG3PTqiRk=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
|
||||
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
|
||||
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y=
|
||||
gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,6 @@ import (
type Config struct {
    common.Config
    common.Postgres
    common.Clickhouse
    redis.Redis
    objectstorage.ObjectsConfig
    common.HTTP
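
The config struct above embeds shared option groups (Postgres, ClickHouse, Redis, object storage, HTTP). A minimal sketch of how such embedded groups can be populated from environment variables with sethvargo/go-envconfig, which is already a go.mod dependency; the field and variable names below are illustrative assumptions, not the repository's actual definitions.

```go
package main

import (
    "context"
    "log"

    "github.com/sethvargo/go-envconfig"
)

// Hypothetical option groups; the real common.Postgres / common.HTTP fields
// are not shown in this diff.
type Postgres struct {
    ConnectionString string `env:"POSTGRES_STRING"`
}

type HTTP struct {
    Port string `env:"HTTP_PORT,default=8080"`
}

// Embedding the groups lets one Process call fill the whole config.
type Config struct {
    Postgres
    HTTP
}

func main() {
    var cfg Config
    if err := envconfig.Process(context.Background(), &cfg); err != nil {
        log.Fatal(err)
    }
    log.Printf("listening on :%s", cfg.Port)
}
```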
|
|
|||
|
|
@@ -3,10 +3,11 @@ package datasaver
import (
    "context"
    "encoding/json"
    "openreplay/backend/pkg/db/types"

    "openreplay/backend/internal/config/db"
    "openreplay/backend/pkg/db/clickhouse"
    "openreplay/backend/pkg/db/postgres"
    "openreplay/backend/pkg/db/types"
    "openreplay/backend/pkg/logger"
    . "openreplay/backend/pkg/messages"
    queue "openreplay/backend/pkg/queue/types"
|
|
|||
|
|
@@ -2,6 +2,7 @@ package datasaver

import (
    "context"

    "openreplay/backend/pkg/db/postgres"
    "openreplay/backend/pkg/db/types"
    "openreplay/backend/pkg/messages"
|
|
|||
|
|
@@ -133,17 +133,6 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message {
        }
        newMsg.SetMeta(msg.Meta())
        return newMsg
    case *messages.CSSInsertRuleURLBased:
        if e.shouldSkipAsset(m.BaseURL) {
            return msg
        }
        newMsg := &messages.CSSInsertRule{
            ID:    m.ID,
            Index: m.Index,
            Rule:  e.handleCSS(m.SessionID(), m.BaseURL, m.Rule),
        }
        newMsg.SetMeta(msg.Meta())
        return newMsg
    case *messages.AdoptedSSReplaceURLBased:
        if e.shouldSkipAsset(m.BaseURL) {
            return msg
|
|
|||
|
|
@@ -3,7 +3,6 @@ package analytics
import (
    "github.com/go-playground/validator/v10"
    "openreplay/backend/pkg/analytics/charts"
    "openreplay/backend/pkg/analytics/db"
    "openreplay/backend/pkg/metrics/database"
    "time"

@@ -28,14 +27,13 @@ type ServicesBuilder struct {
    ChartsAPI api.Handlers
}

func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, dbMetrics database.Database, pgconn pool.Pool, chConn db.Connector) (*ServicesBuilder, error) {
func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, dbMetrics database.Database, pgconn pool.Pool) (*ServicesBuilder, error) {
    responser := api.NewResponser(webMetrics)
    audiTrail, err := tracer.NewTracer(log, pgconn, dbMetrics)
    if err != nil {
        return nil, err
    }
    reqValidator := validator.New()

    cardsService, err := cards.New(log, pgconn)
    if err != nil {
        return nil, err

@@ -44,7 +42,6 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
    if err != nil {
        return nil, err
    }

    dashboardsService, err := dashboards.New(log, pgconn)
    if err != nil {
        return nil, err

@@ -53,8 +50,7 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
    if err != nil {
        return nil, err
    }

    chartsService, err := charts.New(log, pgconn, chConn)
    chartsService, err := charts.New(log, pgconn)
    if err != nil {
        return nil, err
    }

@@ -62,7 +58,6 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
    if err != nil {
        return nil, err
    }

    return &ServicesBuilder{
        Auth:        auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil, api.NoPrefix),
        RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
|
|
|||
|
|
@@ -6,6 +6,7 @@ import (
    "fmt"
    "strings"

    "github.com/jackc/pgx/v4"
    "github.com/lib/pq"

    "openreplay/backend/pkg/db/postgres/pool"

@@ -47,12 +48,12 @@ func (s *cardsImpl) Create(projectId int, userID uint64, req *CardCreateRequest)
    ctx := context.Background()
    defer func() {
        if err != nil {
            err := tx.TxRollback()
            tx.Rollback(ctx)
            if err != nil {
                return
            }
        } else {
            err := tx.TxCommit()
            err := tx.Commit(ctx)
            if err != nil {
                return
            }

@@ -66,8 +67,8 @@ func (s *cardsImpl) Create(projectId int, userID uint64, req *CardCreateRequest)
        RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at`

    card := &CardGetResponse{}
    err = tx.TxQueryRow(
        sql,
    err = tx.QueryRow(
        ctx, sql,
        projectId, userID, req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic,
    ).Scan(
        &card.CardID,

@@ -97,7 +98,7 @@ func (s *cardsImpl) Create(projectId int, userID uint64, req *CardCreateRequest)
    return card, nil
}

func (s *cardsImpl) CreateSeries(ctx context.Context, tx *pool.Tx, metricId int64, series []CardSeriesBase) []CardSeries {
func (s *cardsImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int64, series []CardSeriesBase) []CardSeries {
    if len(series) == 0 {
        return nil // No series to create
    }

@@ -125,7 +126,7 @@ func (s *cardsImpl) CreateSeries(ctx context.Context, tx *pool.Tx, metricId int6
    query := fmt.Sprintf(sql, strings.Join(values, ","))
    s.log.Info(ctx, "Executing query: %s with args: %v", query, args)

    rows, err := tx.TxQuery(query, args...)
    rows, err := tx.Query(ctx, query, args...)
    if err != nil {
        s.log.Error(ctx, "failed to execute batch insert for series: %v", err)
        return nil

@@ -358,12 +359,12 @@ func (s *cardsImpl) Update(projectId int, cardID int64, userID uint64, req *Card
    ctx := context.Background()
    defer func() {
        if err != nil {
            err := tx.TxRollback()
            tx.Rollback(ctx)
            if err != nil {
                return
            }
        } else {
            err := tx.TxCommit()
            err := tx.Commit(ctx)
            if err != nil {
                return
            }

@@ -378,7 +379,7 @@ func (s *cardsImpl) Update(projectId int, cardID int64, userID uint64, req *Card
        RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at`

    card := &CardGetResponse{}
    err = tx.TxQueryRow(sql,
    err = tx.QueryRow(ctx, sql,
        req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic, cardID, projectId,
    ).Scan(
        &card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf,
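
The Create and Update hunks above move from the pool wrapper's TxQueryRow/TxCommit/TxRollback helpers to plain pgx.Tx calls that take a context. A minimal sketch of that deferred commit/rollback pattern, assuming a pgx v4 connection; the metrics table, the POSTGRES_STRING variable and the insertCard helper are illustrative only, not the repository's code.

```go
package main

import (
    "context"
    "log"
    "os"

    "github.com/jackc/pgx/v4"
)

// insertCard shows the deferred commit/rollback shape used in Create/Update:
// the named return err lets the deferred closure observe the final outcome.
func insertCard(ctx context.Context, tx pgx.Tx, name string) (id int64, err error) {
    defer func() {
        if err != nil {
            _ = tx.Rollback(ctx)
        } else {
            err = tx.Commit(ctx)
        }
    }()
    err = tx.QueryRow(ctx,
        `INSERT INTO metrics (name) VALUES ($1) RETURNING metric_id`, name,
    ).Scan(&id)
    return id, err
}

func main() {
    ctx := context.Background()
    conn, err := pgx.Connect(ctx, os.Getenv("POSTGRES_STRING")) // hypothetical DSN source
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close(ctx)

    tx, err := conn.Begin(ctx)
    if err != nil {
        log.Fatal(err)
    }
    if id, err := insertCard(ctx, tx, "demo card"); err == nil {
        log.Printf("created metric %d", id)
    }
}
```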
|
|
|||
|
|
@@ -46,7 +46,6 @@ func (e *handlersImpl) GetAll() []*api.Description {
        {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"},
        {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"},
        {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"},
        {"/v1/analytics/{projectId}/cards/{id}/sessions", e.getCardSessions, "POST"},
    }
}

@@ -297,8 +296,3 @@ func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) {

    e.responser.ResponseWithJSON(e.log, r.Context(), w, nil, startTime, r.URL.Path, bodySize)
}

func (e *handlersImpl) getCardSessions(w http.ResponseWriter, r *http.Request) {
    // TODO: implement this
    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotImplemented, fmt.Errorf("not implemented"), time.Now(), r.URL.Path, 0)
}
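
GetAll above returns route descriptions as {path, handler, method} triples. A hedged sketch of how such descriptions could be mounted on a gorilla/mux router (gorilla/mux is a go.mod dependency); the Description struct and register helper here are assumptions for illustration, not the repository's api package.

```go
package main

import (
    "net/http"

    "github.com/gorilla/mux"
)

// Description mirrors the {path, handler, method} triples returned by GetAll.
type Description struct {
    Path    string
    Handler http.HandlerFunc
    Method  string
}

// register mounts every description on the router with its HTTP method.
func register(r *mux.Router, descriptions []*Description) {
    for _, d := range descriptions {
        r.HandleFunc(d.Path, d.Handler).Methods(d.Method)
    }
}

func main() {
    r := mux.NewRouter()
    register(r, []*Description{
        {"/v1/analytics/{projectId}/cards/{id}", func(w http.ResponseWriter, _ *http.Request) {
            w.WriteHeader(http.StatusOK)
        }, "GET"},
    })
    _ = http.ListenAndServe(":8080", r)
}
```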
|
|
|||
|
|
@ -6,24 +6,6 @@ import (
|
|||
"time"
|
||||
)
|
||||
|
||||
type MetricType string
|
||||
type MetricOfTimeseries string
|
||||
type MetricOfTable string
|
||||
|
||||
const (
|
||||
MetricTypeTimeseries MetricType = "TIMESERIES"
|
||||
MetricTypeTable MetricType = "TABLE"
|
||||
|
||||
MetricOfTimeseriesSessionCount MetricOfTimeseries = "SESSION_COUNT"
|
||||
MetricOfTimeseriesUserCount MetricOfTimeseries = "USER_COUNT"
|
||||
|
||||
MetricOfTableVisitedURL MetricOfTable = "VISITED_URL"
|
||||
MetricOfTableIssues MetricOfTable = "ISSUES"
|
||||
MetricOfTableUserCountry MetricOfTable = "USER_COUNTRY"
|
||||
MetricOfTableUserDevice MetricOfTable = "USER_DEVICE"
|
||||
MetricOfTableUserBrowser MetricOfTable = "USER_BROWSER"
|
||||
)
|
||||
|
||||
// CardBase Common fields for the Card entity
|
||||
type CardBase struct {
|
||||
Name string `json:"name" validate:"required"`
|
||||
|
|
@ -67,8 +49,8 @@ type CardSeries struct {
|
|||
}
|
||||
|
||||
type SeriesFilter struct {
|
||||
EventsOrder string `json:"eventsOrder" validate:"required,oneof=then or and"`
|
||||
Filters []FilterItem `json:"filters"`
|
||||
EventOrder string `json:"eventOrder" validate:"required,oneof=then or and"`
|
||||
Filters []FilterItem `json:"filters"`
|
||||
}
|
||||
|
||||
type FilterItem struct {
|
||||
|
|
@ -210,34 +192,3 @@ func (s *CardListSort) GetSQLField() string {
|
|||
func (s *CardListSort) GetSQLOrder() string {
|
||||
return strings.ToUpper(s.Order)
|
||||
}
|
||||
|
||||
// ---
|
||||
|
||||
/*
|
||||
class IssueType(str, Enum):
|
||||
|
||||
CLICK_RAGE = 'click_rage'
|
||||
DEAD_CLICK = 'dead_click'
|
||||
EXCESSIVE_SCROLLING = 'excessive_scrolling'
|
||||
BAD_REQUEST = 'bad_request'
|
||||
MISSING_RESOURCE = 'missing_resource'
|
||||
MEMORY = 'memory'
|
||||
CPU = 'cpu'
|
||||
SLOW_RESOURCE = 'slow_resource'
|
||||
SLOW_PAGE_LOAD = 'slow_page_load'
|
||||
CRASH = 'crash'
|
||||
CUSTOM = 'custom'
|
||||
JS_EXCEPTION = 'js_exception'
|
||||
MOUSE_THRASHING = 'mouse_thrashing'
|
||||
# IOS
|
||||
TAP_RAGE = 'tap_rage'
|
||||
*/
|
||||
type IssueType string
|
||||
type ChartData struct {
|
||||
StartTs uint64 `json:"startTs"`
|
||||
EndTs uint64 `json:"endTs"`
|
||||
Density uint64 `json:"density"`
|
||||
Filters []FilterItem `json:"filter"`
|
||||
MetricOf string `json:"metricOf"`
|
||||
MetricValue []IssueType `json:"metricValue"`
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,51 +1,50 @@
package charts

import (
    "encoding/json"
    "fmt"
    "openreplay/backend/pkg/analytics/db"

    "openreplay/backend/pkg/db/postgres/pool"
    "openreplay/backend/pkg/logger"
)

type Charts interface {
    GetData(projectId int, userId uint64, req *MetricPayload) (interface{}, error)
    GetData(projectId int, userId uint64, req *GetCardChartDataRequest) ([]DataPoint, error)
}

type chartsImpl struct {
    log    logger.Logger
    pgconn pool.Pool
    chConn db.Connector
}

func New(log logger.Logger, conn pool.Pool, chConn db.Connector) (Charts, error) {
func New(log logger.Logger, conn pool.Pool) (Charts, error) {
    return &chartsImpl{
        log:    log,
        pgconn: conn,
        chConn: chConn,
    }, nil
}

// GetData def get_chart()
func (s *chartsImpl) GetData(projectId int, userID uint64, req *MetricPayload) (interface{}, error) {
    if req == nil {
        return nil, fmt.Errorf("request is empty")
func (s *chartsImpl) GetData(projectId int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) {
    jsonInput := `
    {
      "data": [
        {
          "timestamp": 1733934939000,
          "Series A": 100,
          "Series B": 200
        },
        {
          "timestamp": 1733935939000,
          "Series A": 150,
          "Series B": 250
        }
      ]
    }`

    var resp GetCardChartDataResponse
    if err := json.Unmarshal([]byte(jsonInput), &resp); err != nil {
        return nil, fmt.Errorf("failed to unmarshal response: %w", err)
    }

    payload := Payload{
        ProjectId:     projectId,
        UserId:        userID,
        MetricPayload: req,
    }
    qb, err := NewQueryBuilder(payload)
    if err != nil {
        return nil, fmt.Errorf("error creating query builder: %v", err)
    }

    resp, err := qb.Execute(payload, s.chConn)
    if err != nil {
        return nil, fmt.Errorf("error executing query: %v", err)
    }

    //return resp, nil
    return map[string]interface{}{"data": resp}, nil
    return resp.Data, nil
}
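
The stubbed GetData above unmarshals a fixed JSON document whose objects mix a timestamp with per-series values. The diff does not show how DataPoint or GetCardChartDataResponse are defined, so the sketch below is only one plausible decoding of that shape; the custom UnmarshalJSON and field names are assumptions, not repository code.

```go
package main

import (
    "encoding/json"
    "fmt"
)

// DataPoint (hypothetical definition): a timestamp plus named series values.
type DataPoint struct {
    Timestamp uint64             `json:"timestamp"`
    Series    map[string]float64 `json:"-"`
}

// UnmarshalJSON splits the "timestamp" key from the remaining numeric keys,
// which are treated as series names.
func (d *DataPoint) UnmarshalJSON(b []byte) error {
    raw := map[string]json.Number{}
    if err := json.Unmarshal(b, &raw); err != nil {
        return err
    }
    d.Series = map[string]float64{}
    for k, v := range raw {
        if k == "timestamp" {
            ts, err := v.Int64()
            if err != nil {
                return err
            }
            d.Timestamp = uint64(ts)
            continue
        }
        f, _ := v.Float64()
        d.Series[k] = f
    }
    return nil
}

func main() {
    in := `{"timestamp": 1733934939000, "Series A": 100, "Series B": 200}`
    var p DataPoint
    if err := json.Unmarshal([]byte(in), &p); err != nil {
        panic(err)
    }
    fmt.Println(p.Timestamp, p.Series["Series A"])
}
```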
|
|
|||
|
|
@ -1,427 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Fields map[string]string
|
||||
|
||||
func getSessionMetaFields() Fields {
|
||||
return Fields{
|
||||
"revId": "rev_id",
|
||||
"country": "user_country",
|
||||
"os": "user_os",
|
||||
"platform": "user_device_type",
|
||||
"device": "user_device",
|
||||
"browser": "user_browser",
|
||||
}
|
||||
}
|
||||
|
||||
func getMetadataFields() Fields {
|
||||
return Fields{
|
||||
"userId": "user_id",
|
||||
"userAnonymousId": "user_anonymous_id",
|
||||
"metadata1": "metadata_1",
|
||||
"metadata2": "metadata_2",
|
||||
"metadata3": "metadata_3",
|
||||
"metadata4": "metadata_4",
|
||||
"metadata5": "metadata_5",
|
||||
"metadata6": "metadata_6",
|
||||
"metadata7": "metadata_7",
|
||||
"metadata8": "metadata_8",
|
||||
"metadata9": "metadata_9",
|
||||
"metadata10": "metadata_10",
|
||||
}
|
||||
}
|
||||
|
||||
func getStepSize(startTimestamp, endTimestamp int64, density int, decimal bool, factor int) float64 {
|
||||
factorInt64 := int64(factor)
|
||||
stepSize := (endTimestamp / factorInt64) - (startTimestamp / factorInt64)
|
||||
|
||||
if density <= 1 {
|
||||
return float64(stepSize)
|
||||
}
|
||||
|
||||
if decimal {
|
||||
return float64(stepSize) / float64(density)
|
||||
}
|
||||
|
||||
return float64(stepSize / int64(density-1))
|
||||
}
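
A small worked example of the step-size arithmetic above (a sketch, not repository code): a one-minute window at density 6 with factor 1000 and decimal=false yields integer steps of 12 seconds.

```go
package main

import "fmt"

// stepSize reproduces the formula from getStepSize: scale both timestamps by
// factor, take the span, then divide by density (or density-1 when integer
// steps are requested).
func stepSize(startMs, endMs int64, density int, decimal bool, factor int64) float64 {
    step := endMs/factor - startMs/factor
    if density <= 1 {
        return float64(step)
    }
    if decimal {
        return float64(step) / float64(density)
    }
    return float64(step / int64(density-1))
}

func main() {
    fmt.Println(stepSize(0, 60_000, 6, false, 1000)) // 12
}
```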
|
||||
|
||||
//func getStepSize(startTimestamp, endTimestamp, density uint64, decimal bool, factor uint64) float64 {
|
||||
// stepSize := (endTimestamp / factor) - (startTimestamp / factor) // TODO: should I use float64 here?
|
||||
// if !decimal {
|
||||
// density--
|
||||
// }
|
||||
// return float64(stepSize) / float64(density)
|
||||
//}
|
||||
|
||||
func getBasicConstraints(tableName string, timeConstraint, roundStart bool, data map[string]interface{}, identifier string) []string { // if tableName is not empty, append a trailing dot
|
||||
if tableName != "" {
|
||||
tableName += "."
|
||||
}
|
||||
chSubQuery := []string{fmt.Sprintf("%s%s = toUInt16(:%s)", tableName, identifier, identifier)}
|
||||
|
||||
if timeConstraint {
|
||||
if roundStart {
|
||||
chSubQuery = append(chSubQuery, fmt.Sprintf("toStartOfInterval(%sdatetime, INTERVAL :step_size second) >= toDateTime(:startTimestamp/1000)", tableName))
|
||||
} else {
|
||||
chSubQuery = append(chSubQuery, fmt.Sprintf("%sdatetime >= toDateTime(:startTimestamp/1000)", tableName))
|
||||
}
|
||||
chSubQuery = append(chSubQuery, fmt.Sprintf("%sdatetime < toDateTime(:endTimestamp/1000)", tableName))
|
||||
}
|
||||
return append(chSubQuery, getGenericConstraint(data, tableName)...)
|
||||
}
|
||||
|
||||
func getGenericConstraint(data map[string]interface{}, tableName string) []string {
|
||||
return getConstraint(data, getSessionMetaFields(), tableName)
|
||||
}
|
||||
|
||||
func getConstraint(data map[string]interface{}, fields Fields, tableName string) []string {
|
||||
var constraints []string
|
||||
filters, err := data["filters"].([]map[string]interface{})
|
||||
if !err {
|
||||
log.Println("error getting filters from data")
|
||||
filters = make([]map[string]interface{}, 0) // to skip the next block
|
||||
}
|
||||
|
||||
// process filters
|
||||
for i, f := range filters {
|
||||
key, _ := f["key"].(string)
|
||||
value, _ := f["value"].(string)
|
||||
|
||||
if field, ok := fields[key]; ok {
|
||||
if value == "*" || value == "" {
|
||||
constraints = append(constraints, fmt.Sprintf("isNotNull(%s%s)", tableName, field))
|
||||
} else {
|
||||
// constraints.append(f"{table_name}{fields[f['key']]} = %({f['key']}_{i})s")
|
||||
constraints = append(constraints, fmt.Sprintf("%s%s = %%(%s_%d)s", tableName, field, key, i)) // TODO: where we'll keep the value?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO from Python: remove this in next release
|
||||
offset := len(filters)
|
||||
for i, f := range data {
|
||||
key, _ := f.(string)
|
||||
value, _ := data[key].(string)
|
||||
|
||||
if field, ok := fields[key]; ok {
|
||||
if value == "*" || value == "" {
|
||||
constraints = append(constraints, fmt.Sprintf("isNotNull(%s%s)", tableName, field))
|
||||
} else {
|
||||
intI, err := strconv.Atoi(i)
|
||||
if err != nil {
|
||||
log.Printf("error converting data[k] to int: %v", err)
|
||||
continue
|
||||
} else {
|
||||
constraints = append(constraints, fmt.Sprintf("%s%s = %%(%s_%d)s", tableName, field, f, intI+offset))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return constraints
|
||||
}
|
||||
|
||||
func getMetaConstraint(data map[string]interface{}) []string {
|
||||
return getConstraint(data, getMetadataFields(), "sessions_metadata.")
|
||||
}
|
||||
|
||||
func getConstraintValues(data map[string]interface{}) map[string]interface{} {
|
||||
params := make(map[string]interface{})
|
||||
|
||||
if filters, ok := data["filters"].([]map[string]interface{}); ok {
|
||||
for i, f := range filters {
|
||||
key, _ := f["key"].(string)
|
||||
value := f["value"]
|
||||
params[fmt.Sprintf("%s_%d", key, i)] = value
|
||||
}
|
||||
|
||||
// TODO from Python: remove this in next release
|
||||
offset := len(data["filters"].([]map[string]interface{}))
|
||||
i := 0
|
||||
for k, v := range data {
|
||||
params[fmt.Sprintf("%s_%d", k, i+offset)] = v
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
/*
|
||||
def get_main_sessions_table(timestamp=0):
|
||||
|
||||
return "experimental.sessions_l7d_mv" \
|
||||
if config("EXP_7D_MV", cast=bool, default=True) \
|
||||
and timestamp and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
|
||||
*/
|
||||
func getMainSessionsTable(timestamp int64) string {
|
||||
return "experimental.sessions"
|
||||
}
|
||||
|
||||
// Function to convert named parameters to positional parameters
|
||||
func replaceNamedParams(query string, params map[string]interface{}) (string, []interface{}) {
|
||||
var args []interface{}
|
||||
i := 1
|
||||
for key, val := range params {
|
||||
placeholder := ":" + key
|
||||
//query = strings.Replace(query, placeholder, "?", 1)
|
||||
strVal := fmt.Sprintf("%v", val)
|
||||
query = strings.Replace(query, placeholder, strVal, -1)
|
||||
args = append(args, val)
|
||||
i++
|
||||
}
|
||||
return query, args
|
||||
}
|
||||
|
||||
// Helper function to generate a range of floats
|
||||
func frange(start, end, step float64) []float64 {
|
||||
var rangeValues []float64
|
||||
for i := start; i < end; i += step {
|
||||
rangeValues = append(rangeValues, i)
|
||||
}
|
||||
return rangeValues
|
||||
}
|
||||
|
||||
// Helper function to add missing keys from the "complete" map to the "original" map
|
||||
func addMissingKeys(original, complete map[string]interface{}) map[string]interface{} {
|
||||
for k, v := range complete {
|
||||
if _, exists := original[k]; !exists {
|
||||
original[k] = v
|
||||
}
|
||||
}
|
||||
return original
|
||||
}
|
||||
|
||||
// CompleteMissingSteps fills in missing steps in the data
|
||||
func CompleteMissingSteps(
|
||||
startTime, endTime int64,
|
||||
density int,
|
||||
neutral map[string]interface{},
|
||||
rows []map[string]interface{},
|
||||
timeKey string,
|
||||
timeCoefficient int64,
|
||||
) []map[string]interface{} {
|
||||
if len(rows) == density {
|
||||
return rows
|
||||
}
|
||||
|
||||
// Calculate the step size
|
||||
step := getStepSize(startTime, endTime, density, true, 1000)
|
||||
optimal := make([][2]uint64, 0)
|
||||
for _, i := range frange(float64(startTime)/float64(timeCoefficient), float64(endTime)/float64(timeCoefficient), step) {
|
||||
startInterval := uint64(i * float64(timeCoefficient))
|
||||
endInterval := uint64((i + step) * float64(timeCoefficient))
|
||||
optimal = append(optimal, [2]uint64{startInterval, endInterval})
|
||||
}
|
||||
|
||||
var result []map[string]interface{}
|
||||
r, o := 0, 0
|
||||
|
||||
// Iterate over density
|
||||
for i := 0; i < density; i++ {
|
||||
// Clone the neutral map
|
||||
neutralClone := make(map[string]interface{})
|
||||
for k, v := range neutral {
|
||||
if fn, ok := v.(func() interface{}); ok {
|
||||
neutralClone[k] = fn()
|
||||
} else {
|
||||
neutralClone[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// If we can just add the rest of the rows to result
|
||||
if r < len(rows) && len(result)+len(rows)-r == density {
|
||||
result = append(result, rows[r:]...)
|
||||
break
|
||||
}
|
||||
|
||||
// Determine where the current row fits within the optimal intervals
|
||||
if r < len(rows) && o < len(optimal) && rows[r][timeKey].(uint64) < optimal[o][0] {
|
||||
rows[r] = addMissingKeys(rows[r], neutralClone)
|
||||
result = append(result, rows[r])
|
||||
r++
|
||||
} else if r < len(rows) && o < len(optimal) && optimal[o][0] <= rows[r][timeKey].(uint64) && rows[r][timeKey].(uint64) < optimal[o][1] {
|
||||
rows[r] = addMissingKeys(rows[r], neutralClone)
|
||||
result = append(result, rows[r])
|
||||
r++
|
||||
o++
|
||||
} else {
|
||||
neutralClone[timeKey] = optimal[o][0]
|
||||
result = append(result, neutralClone)
|
||||
o++
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func progress(oldVal, newVal uint64) float64 {
|
||||
if newVal > 0 {
|
||||
return (float64(oldVal) - float64(newVal)) / float64(newVal) * 100 // cast before subtracting to avoid uint64 underflow when newVal > oldVal
|
||||
}
|
||||
if oldVal == 0 {
|
||||
return 0
|
||||
}
|
||||
return 100
|
||||
}
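// Example: progress(150, 100) = ((150-100)/100)*100 = 50, i.e. the previous count
// was 50% higher than the current one; progress(0, 0) returns 0 and progress(n, 0)
// returns 100 for any n > 0.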
|
||||
|
||||
// parse builds the sub-query constraints and the named parameters shared by the chart queries below.
|
||||
func parse(projectID int, startTs, endTs int64, density int, args map[string]interface{}) ([]string, []string, map[string]interface{}) {
|
||||
stepSize := getStepSize(startTs, endTs, density, false, 1000)
|
||||
chSubQuery := getBasicConstraints("sessions", true, false, args, "project_id")
|
||||
chSubQueryChart := getBasicConstraints("sessions", true, true, args, "project_id")
|
||||
metaCondition := getMetaConstraint(args)
|
||||
chSubQuery = append(chSubQuery, metaCondition...)
|
||||
chSubQueryChart = append(chSubQueryChart, metaCondition...)
|
||||
|
||||
params := map[string]interface{}{
|
||||
"step_size": stepSize,
|
||||
"project_id": projectID,
|
||||
"startTimestamp": startTs,
|
||||
"endTimestamp": endTs,
|
||||
}
|
||||
for k, v := range getConstraintValues(args) {
|
||||
params[k] = v
|
||||
}
|
||||
return chSubQuery, chSubQueryChart, params
|
||||
}
|
||||
|
||||
// Sessions trend
|
||||
//func (s *chartsImpl) getProcessedSessions(projectID int, startTs, endTs int64, density int, args map[string]interface{}) (interface{}, error) {
|
||||
// chQuery := `
|
||||
// SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL :step_size second)) * 1000 AS timestamp,
|
||||
// COUNT(DISTINCT sessions.session_id) AS value
|
||||
// FROM :main_sessions_table AS sessions
|
||||
// WHERE :sub_query_chart
|
||||
// GROUP BY timestamp
|
||||
// ORDER BY timestamp;
|
||||
// `
|
||||
// chSubQuery, chSubQueryChart, params := parse(projectID, startTs, endTs, density, args)
|
||||
//
|
||||
// chQuery = strings.Replace(chQuery, ":main_sessions_table", getMainSessionsTable(startTs), -1)
|
||||
// chQuery = strings.Replace(chQuery, ":sub_query_chart", strings.Join(chSubQueryChart, " AND "), -1)
|
||||
//
|
||||
// preparedQuery, preparedArgs := replaceNamedParams(chQuery, params)
|
||||
// rows, err := s.chConn.Query(context.Background(), preparedQuery, preparedArgs)
|
||||
// if err != nil {
|
||||
// log.Fatalf("Error executing query: %v", err)
|
||||
// }
|
||||
// preparedRows := make([]map[string]interface{}, 0)
|
||||
// var sum uint64
|
||||
// for rows.Next() {
|
||||
// var timestamp, value uint64
|
||||
// if err := rows.Scan(&timestamp, &value); err != nil {
|
||||
// log.Fatalf("Error scanning row: %v", err)
|
||||
// }
|
||||
// fmt.Printf("Timestamp: %d, Value: %d\n", timestamp, value)
|
||||
// sum += value
|
||||
// preparedRows = append(preparedRows, map[string]interface{}{"timestamp": timestamp, "value": value})
|
||||
// }
|
||||
//
|
||||
// results := map[string]interface{}{
|
||||
// "value": sum,
|
||||
// "chart": CompleteMissingSteps(startTs, endTs, int(density), map[string]interface{}{"value": 0}, preparedRows, "timestamp", 1000),
|
||||
// }
|
||||
//
|
||||
// diff := endTs - startTs
|
||||
// endTs = startTs
|
||||
// startTs = endTs - diff
|
||||
//
|
||||
// log.Println(results)
|
||||
//
|
||||
// chQuery = fmt.Sprintf(`
|
||||
// SELECT COUNT(1) AS count
|
||||
// FROM :main_sessions_table AS sessions
|
||||
// WHERE :sub_query_chart;
|
||||
// `)
|
||||
// chQuery = strings.Replace(chQuery, ":main_sessions_table", getMainSessionsTable(startTs), -1)
|
||||
// chQuery = strings.Replace(chQuery, ":sub_query_chart", strings.Join(chSubQuery, " AND "), -1)
|
||||
//
|
||||
// var count uint64
|
||||
//
|
||||
// preparedQuery, preparedArgs = replaceNamedParams(chQuery, params)
|
||||
// if err := s.chConn.QueryRow(context.Background(), preparedQuery, preparedArgs).Scan(&count); err != nil {
|
||||
// log.Fatalf("Error executing query: %v", err)
|
||||
// }
|
||||
//
|
||||
// results["progress"] = progress(count, results["value"].(uint64))
|
||||
//
|
||||
// // TODO: this should be returned in any case
|
||||
// results["unit"] = "COUNT"
|
||||
// fmt.Println(results)
|
||||
//
|
||||
// return results, nil
|
||||
//}
|
||||
//
|
||||
//// Users trend
|
||||
//func (s *chartsImpl) getUniqueUsers(projectID int, startTs, endTs int64, density int, args map[string]interface{}) (interface{}, error) {
|
||||
// chQuery := `
|
||||
// SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL :step_size second)) * 1000 AS timestamp,
|
||||
// COUNT(DISTINCT sessions.user_id) AS value
|
||||
// FROM :main_sessions_table AS sessions
|
||||
// WHERE :sub_query_chart
|
||||
// GROUP BY timestamp
|
||||
// ORDER BY timestamp;
|
||||
// `
|
||||
// chSubQuery, chSubQueryChart, params := parse(projectID, startTs, endTs, density, args)
|
||||
// chSubQueryChart = append(chSubQueryChart, []string{"isNotNull(sessions.user_id)", "sessions.user_id!=''"}...)
|
||||
//
|
||||
// chQuery = strings.Replace(chQuery, ":main_sessions_table", getMainSessionsTable(startTs), -1)
|
||||
// chQuery = strings.Replace(chQuery, ":sub_query_chart", strings.Join(chSubQueryChart, " AND "), -1)
|
||||
//
|
||||
// preparedQuery, preparedArgs := replaceNamedParams(chQuery, params)
|
||||
// rows, err := s.chConn.Query(context.Background(), preparedQuery, preparedArgs)
|
||||
// if err != nil {
|
||||
// log.Fatalf("Error executing query: %v", err)
|
||||
// }
|
||||
// preparedRows := make([]map[string]interface{}, 0)
|
||||
// var sum uint64
|
||||
// for rows.Next() {
|
||||
// var timestamp, value uint64
|
||||
// if err := rows.Scan(&timestamp, &value); err != nil {
|
||||
// log.Fatalf("Error scanning row: %v", err)
|
||||
// }
|
||||
// fmt.Printf("Timestamp: %d, Value: %d\n", timestamp, value)
|
||||
// sum += value
|
||||
// preparedRows = append(preparedRows, map[string]interface{}{"timestamp": timestamp, "value": value})
|
||||
// }
|
||||
//
|
||||
// results := map[string]interface{}{
|
||||
// "value": sum,
|
||||
// "chart": CompleteMissingSteps(startTs, endTs, int(density), map[string]interface{}{"value": 0}, preparedRows, "timestamp", 1000),
|
||||
// }
|
||||
//
|
||||
// diff := endTs - startTs
|
||||
// endTs = startTs
|
||||
// startTs = endTs - diff
|
||||
//
|
||||
// log.Println(results)
|
||||
//
|
||||
// chQuery = fmt.Sprintf(`
|
||||
// SELECT COUNT(DISTINCT user_id) AS count
|
||||
// FROM :main_sessions_table AS sessions
|
||||
// WHERE :sub_query_chart;
|
||||
// `)
|
||||
// chQuery = strings.Replace(chQuery, ":main_sessions_table", getMainSessionsTable(startTs), -1)
|
||||
// chQuery = strings.Replace(chQuery, ":sub_query_chart", strings.Join(chSubQuery, " AND "), -1)
|
||||
//
|
||||
// var count uint64
|
||||
//
|
||||
// preparedQuery, preparedArgs = replaceNamedParams(chQuery, params)
|
||||
// if err := s.chConn.QueryRow(context.Background(), preparedQuery, preparedArgs).Scan(&count); err != nil {
|
||||
// log.Fatalf("Error executing query: %v", err)
|
||||
// }
|
||||
//
|
||||
// results["progress"] = progress(count, results["value"].(uint64))
|
||||
//
|
||||
// // TODO: this should be returned in any case
|
||||
// results["unit"] = "COUNT"
|
||||
// fmt.Println(results)
|
||||
//
|
||||
// return results, nil
|
||||
//}
|
||||
|
|
@@ -41,9 +41,8 @@ type handlersImpl struct {
|
|||
|
||||
func (e *handlersImpl) GetAll() []*api.Description {
|
||||
return []*api.Description{
|
||||
{"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, // for dashboards
|
||||
{"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"},
|
||||
{"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"},
|
||||
{"/v1/analytics/{projectId}/cards/try", e.getCardChartData, "POST"}, // for cards itself
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -74,7 +73,7 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request)
|
|||
}
|
||||
bodySize = len(bodyBytes)
|
||||
|
||||
req := &MetricPayload{}
|
||||
req := &GetCardChartDataRequest{}
|
||||
if err := json.Unmarshal(bodyBytes, req); err != nil {
|
||||
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
|
||||
return
|
||||
|
|
|
|||
|
|
@@ -1,236 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type FunnelStepResult struct {
|
||||
LevelNumber uint64 `json:"step"`
|
||||
StepName string `json:"type"`
|
||||
CountAtLevel uint64 `json:"count"`
|
||||
Operator string `json:"operator"`
|
||||
Value []string `json:"value"`
|
||||
DropPct float64 `json:"dropPct"`
|
||||
}
|
||||
|
||||
type FunnelResponse struct {
|
||||
Steps []FunnelStepResult `json:"stages"`
|
||||
}
|
||||
|
||||
type FunnelQueryBuilder struct{}
|
||||
|
||||
func (f FunnelQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
q, err := f.buildQuery(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rows, err := conn.Query(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// extract step filters
|
||||
s := p.MetricPayload.Series[0]
|
||||
var stepFilters []Filter
|
||||
for _, flt := range s.Filter.Filters {
|
||||
if flt.IsEvent {
|
||||
stepFilters = append(stepFilters, flt)
|
||||
}
|
||||
}
|
||||
|
||||
var steps []FunnelStepResult
|
||||
for rows.Next() {
|
||||
var r FunnelStepResult
|
||||
if err := rows.Scan(&r.LevelNumber, &r.StepName, &r.CountAtLevel); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
idx := int(r.LevelNumber) - 1
|
||||
if idx >= 0 && idx < len(stepFilters) {
|
||||
r.Operator = stepFilters[idx].Operator
|
||||
r.Value = stepFilters[idx].Value
|
||||
}
|
||||
steps = append(steps, r)
|
||||
}
|
||||
|
||||
// compute drop percentages
|
||||
if len(steps) > 0 {
|
||||
prev := steps[0].CountAtLevel
|
||||
steps[0].DropPct = 0
|
||||
for i := 1; i < len(steps); i++ {
|
||||
curr := steps[i].CountAtLevel
|
||||
if prev > 0 {
|
||||
steps[i].DropPct = (float64(prev-curr) / float64(prev)) * 100
|
||||
} else {
|
||||
steps[i].DropPct = 0
|
||||
}
|
||||
prev = curr
|
||||
}
|
||||
}
|
||||
|
||||
return FunnelResponse{Steps: steps}, nil
|
||||
}
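// Example (illustrative only): counts at levels [1000, 400, 100] produce
// DropPct values [0, 60, 75]; each drop is computed relative to the
// immediately preceding step, not to the first one.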
|
||||
|
||||
func (f FunnelQueryBuilder) buildQuery(p Payload) (string, error) {
|
||||
if len(p.MetricPayload.Series) == 0 {
|
||||
return "", fmt.Errorf("series empty")
|
||||
}
|
||||
s := p.MetricPayload.Series[0]
|
||||
metricFormat := p.MetricPayload.MetricFormat
|
||||
|
||||
var (
|
||||
globalFilters []Filter
|
||||
stepFilters []Filter
|
||||
sessionDurationFilter *Filter
|
||||
)
|
||||
for _, flt := range s.Filter.Filters {
|
||||
if flt.IsEvent {
|
||||
stepFilters = append(stepFilters, flt)
|
||||
} else if flt.Type == "duration" {
|
||||
durFlt := flt // copy before taking the address: &flt would keep pointing at the loop variable (pre-Go 1.22)
sessionDurationFilter = &durFlt
|
||||
} else {
|
||||
globalFilters = append(globalFilters, flt)
|
||||
}
|
||||
}
|
||||
|
||||
requiredColumns := make(map[string]struct{})
|
||||
var collectColumns func([]Filter)
|
||||
collectColumns = func(filters []Filter) {
|
||||
for _, flt := range filters {
|
||||
if col, ok := mainColumns[string(flt.Type)]; ok {
|
||||
requiredColumns[col] = struct{}{}
|
||||
}
|
||||
collectColumns(flt.Filters)
|
||||
}
|
||||
}
|
||||
collectColumns(globalFilters)
|
||||
collectColumns(stepFilters)
|
||||
|
||||
selectCols := []string{
|
||||
`e.created_at`,
|
||||
`e."$event_name" AS event_name`,
|
||||
`e."$properties" AS properties`,
|
||||
}
|
||||
for col := range requiredColumns {
|
||||
logical := reverseLookup(mainColumns, col)
|
||||
selectCols = append(selectCols, fmt.Sprintf(`e."%s" AS %s`, col, logical))
|
||||
}
|
||||
selectCols = append(selectCols,
|
||||
`e.session_id`,
|
||||
`e.distinct_id`,
|
||||
`s.user_id AS session_user_id`,
|
||||
fmt.Sprintf("if('%s' = 'sessionCount', toString(e.session_id), coalesce(nullif(s.user_id,''),e.distinct_id)) AS entity_id", metricFormat),
|
||||
)
|
||||
|
||||
globalConds, _ := buildEventConditions(globalFilters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "e",
|
||||
PropertiesColumnName: "$properties",
|
||||
})
|
||||
|
||||
base := []string{
|
||||
fmt.Sprintf("e.created_at >= toDateTime(%d/1000)", p.MetricPayload.StartTimestamp),
|
||||
fmt.Sprintf("e.created_at < toDateTime(%d/1000)", p.MetricPayload.EndTimestamp+86400000),
|
||||
fmt.Sprintf("e.project_id = %d", p.ProjectId),
|
||||
}
|
||||
base = append(base, globalConds...)
|
||||
if sessionDurationFilter != nil {
|
||||
vals := sessionDurationFilter.Value
|
||||
if len(vals) > 0 && vals[0] != "" {
|
||||
base = append(base, fmt.Sprintf("s.duration >= %s", vals[0]))
|
||||
}
|
||||
if len(vals) > 1 && vals[1] != "" {
|
||||
base = append(base, fmt.Sprintf("s.duration <= %s", vals[1]))
|
||||
}
|
||||
}
|
||||
where := strings.Join(base, " AND ")
|
||||
|
||||
var (
|
||||
stepNames []string
|
||||
stepExprs []string
|
||||
clickCount int
|
||||
)
|
||||
for i, flt := range stepFilters {
|
||||
stepNames = append(stepNames, fmt.Sprintf("'%s'", flt.Type))
|
||||
conds, _ := buildEventConditions([]Filter{flt}, BuildConditionsOptions{
|
||||
DefinedColumns: cteColumnAliases(),
|
||||
PropertiesColumnName: "properties",
|
||||
MainTableAlias: "",
|
||||
})
|
||||
var exprParts []string
|
||||
exprParts = append(exprParts, fmt.Sprintf("event_name = funnel_steps[%d]", i+1))
|
||||
if flt.Type == "CLICK" {
|
||||
clickCount++
|
||||
exprParts = append(exprParts, fmt.Sprintf("click_idx = %d", clickCount))
|
||||
}
|
||||
exprParts = append(exprParts, conds...)
|
||||
stepExprs = append(stepExprs, fmt.Sprintf("(%s)", strings.Join(exprParts, " AND ")))
|
||||
}
|
||||
|
||||
stepsArr := fmt.Sprintf("[%s]", strings.Join(stepNames, ","))
|
||||
windowArgs := strings.Join(stepExprs, ",\n ")
|
||||
|
||||
q := fmt.Sprintf(`
|
||||
WITH
|
||||
%s AS funnel_steps,
|
||||
86400 AS funnel_window_seconds,
|
||||
events_for_funnel AS (
|
||||
SELECT
|
||||
%s
|
||||
FROM product_analytics.events AS e
|
||||
JOIN experimental.sessions AS s USING(session_id)
|
||||
WHERE %s
|
||||
ORDER BY e.session_id, e.created_at
|
||||
),
|
||||
numbered_clicks AS (
|
||||
SELECT
|
||||
entity_id,
|
||||
created_at,
|
||||
row_number() OVER (PARTITION BY entity_id ORDER BY created_at) AS click_idx
|
||||
FROM events_for_funnel
|
||||
WHERE event_name = 'CLICK'
|
||||
),
|
||||
funnel_levels_reached AS (
|
||||
SELECT
|
||||
ef.entity_id,
|
||||
windowFunnel(funnel_window_seconds)(
|
||||
toDateTime(ef.created_at),
|
||||
%s
|
||||
) AS max_level
|
||||
FROM events_for_funnel ef
|
||||
LEFT JOIN numbered_clicks nc
|
||||
ON ef.entity_id = nc.entity_id
|
||||
AND ef.created_at = nc.created_at
|
||||
GROUP BY ef.entity_id
|
||||
),
|
||||
counts_by_level AS (
|
||||
SELECT
|
||||
seq.number + 1 AS level_number,
|
||||
countDistinctIf(entity_id, max_level >= seq.number + 1) AS cnt
|
||||
FROM funnel_levels_reached
|
||||
CROSS JOIN numbers(length(funnel_steps)) AS seq
|
||||
GROUP BY seq.number
|
||||
),
|
||||
step_list AS (
|
||||
SELECT
|
||||
seq.number + 1 AS level_number,
|
||||
funnel_steps[seq.number + 1] AS step_name
|
||||
FROM numbers(length(funnel_steps)) AS seq
|
||||
)
|
||||
SELECT
|
||||
s.level_number,
|
||||
s.step_name,
|
||||
ifNull(c.cnt, 0) AS count_at_level
|
||||
FROM step_list AS s
|
||||
LEFT JOIN counts_by_level AS c ON s.level_number = c.level_number
|
||||
ORDER BY s.level_number;`,
|
||||
stepsArr,
|
||||
strings.Join(selectCols, ",\n "),
|
||||
where,
|
||||
windowArgs,
|
||||
)
|
||||
|
||||
return q, nil
|
||||
}
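// Note on the generated SQL (ClickHouse behaviour, not part of this change):
// windowFunnel(window)(timestamp, cond1, cond2, ...) returns, per entity, the
// length of the longest prefix of the condition chain matched in order within
// `window` seconds; counts_by_level then counts the entities whose max_level
// reaches each step.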
|
||||
|
|
@@ -1,100 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type HeatmapPoint struct {
|
||||
NormalizedX float64 `json:"normalizedX"`
|
||||
NormalizedY float64 `json:"normalizedY"`
|
||||
}
|
||||
|
||||
type HeatmapResponse struct {
|
||||
Points []HeatmapPoint `json:"data"`
|
||||
}
|
||||
|
||||
type HeatmapQueryBuilder struct{}
|
||||
|
||||
func (h HeatmapQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
q, err := h.buildQuery(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rows, err := conn.Query(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var pts []HeatmapPoint
|
||||
for rows.Next() {
|
||||
var x, y float64
|
||||
if err := rows.Scan(&x, &y); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pts = append(pts, HeatmapPoint{x, y})
|
||||
}
|
||||
|
||||
return HeatmapResponse{
|
||||
Points: pts,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (h HeatmapQueryBuilder) buildQuery(p Payload) (string, error) {
|
||||
if len(p.MetricPayload.Series) == 0 {
|
||||
return "", fmt.Errorf("series empty")
|
||||
}
|
||||
s := p.MetricPayload.Series[0]
|
||||
|
||||
var globalFilters, eventFilters []Filter
|
||||
for _, flt := range s.Filter.Filters {
|
||||
if flt.IsEvent {
|
||||
eventFilters = append(eventFilters, flt)
|
||||
} else {
|
||||
globalFilters = append(globalFilters, flt)
|
||||
}
|
||||
}
|
||||
|
||||
globalConds, _ := buildEventConditions(globalFilters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "e",
|
||||
})
|
||||
|
||||
eventConds, _ := buildEventConditions(eventFilters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "e",
|
||||
})
|
||||
|
||||
base := []string{
|
||||
fmt.Sprintf("e.created_at >= toDateTime(%d/1000)", p.MetricPayload.StartTimestamp),
|
||||
fmt.Sprintf("e.created_at < toDateTime(%d/1000)", p.MetricPayload.EndTimestamp),
|
||||
fmt.Sprintf("e.project_id = %d", p.ProjectId),
|
||||
"e.session_id IS NOT NULL",
|
||||
"e.`$event_name` = 'CLICK'",
|
||||
}
|
||||
base = append(base, globalConds...)
|
||||
|
||||
//if len(globalNames) > 0 {
|
||||
// base = append(base, "e.`$event_name` IN ("+buildInClause(globalNames)+")")
|
||||
//}
|
||||
|
||||
//if len(eventNames) > 0 {
|
||||
// base = append(base, "e.`$event_name` IN ("+buildInClause(eventNames)+")")
|
||||
//}
|
||||
|
||||
base = append(base, eventConds...)
|
||||
|
||||
where := strings.Join(base, " AND ")
|
||||
|
||||
q := fmt.Sprintf(`
|
||||
SELECT
|
||||
JSONExtractFloat(toString(e."$properties"), 'normalized_x') AS normalized_x,
|
||||
JSONExtractFloat(toString(e."$properties"), 'normalized_y') AS normalized_y
|
||||
FROM product_analytics.events AS e
|
||||
-- JOIN experimental.sessions AS s USING(session_id)
|
||||
WHERE %s LIMIT 500;`, where)
|
||||
|
||||
return q, nil
|
||||
}
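// Example (illustrative only): a CLICK event whose $properties contain
// {"normalized_x": 0.42, "normalized_y": 0.77} yields one heatmap point
// (0.42, 0.77); the query returns at most 500 points.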
|
||||
|
|
@@ -1,96 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type HeatmapSessionResponse struct {
|
||||
SessionID uint64 `json:"session_id"`
|
||||
StartTs uint64 `json:"start_ts"`
|
||||
Duration uint32 `json:"duration"`
|
||||
EventTimestamp uint64 `json:"event_timestamp"`
|
||||
}
|
||||
|
||||
type HeatmapSessionQueryBuilder struct{}
|
||||
|
||||
func (h HeatmapSessionQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
shortestQ, err := h.buildQuery(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var sid uint64
|
||||
var startTs uint64
|
||||
var duration uint32
|
||||
var eventTs uint64
|
||||
row, err := conn.QueryRow(shortestQ)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := row.Scan(&sid, &startTs, &duration, &eventTs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// TODO get mob urls
|
||||
|
||||
return HeatmapSessionResponse{
|
||||
SessionID: sid,
|
||||
StartTs: startTs,
|
||||
Duration: duration,
|
||||
EventTimestamp: eventTs,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (h HeatmapSessionQueryBuilder) buildQuery(p Payload) (string, error) {
|
||||
if len(p.MetricPayload.Series) == 0 {
|
||||
return "", fmt.Errorf("series empty")
|
||||
}
|
||||
s := p.MetricPayload.Series[0]
|
||||
|
||||
var globalFilters, eventFilters []Filter
|
||||
for _, flt := range s.Filter.Filters {
|
||||
if flt.IsEvent {
|
||||
eventFilters = append(eventFilters, flt)
|
||||
} else {
|
||||
globalFilters = append(globalFilters, flt)
|
||||
}
|
||||
}
|
||||
|
||||
globalConds, _ := buildEventConditions(globalFilters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "e",
|
||||
})
|
||||
|
||||
eventConds, _ := buildEventConditions(eventFilters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "e",
|
||||
})
|
||||
|
||||
base := []string{
|
||||
fmt.Sprintf("e.created_at >= toDateTime(%d/1000)", p.MetricPayload.StartTimestamp),
|
||||
fmt.Sprintf("e.created_at < toDateTime(%d/1000)", p.MetricPayload.EndTimestamp+86400000),
|
||||
fmt.Sprintf("e.project_id = %d", p.ProjectId),
|
||||
"s.duration > 500",
|
||||
"e.`$event_name` = 'LOCATION'",
|
||||
}
|
||||
base = append(base, eventConds...)
|
||||
base = append(base, globalConds...)
|
||||
|
||||
where := strings.Join(base, " AND ")
|
||||
|
||||
q := fmt.Sprintf(`
|
||||
SELECT
|
||||
s.session_id,
|
||||
toUnixTimestamp(s.datetime) * 1000 as startTs,
|
||||
s.duration,
|
||||
toUnixTimestamp(e.created_at) * 1000 as eventTs
|
||||
FROM product_analytics.events AS e
|
||||
JOIN experimental.sessions AS s USING(session_id)
|
||||
WHERE %s
|
||||
ORDER BY e.created_at ASC, s.duration ASC
|
||||
LIMIT 1;`, where)
|
||||
|
||||
return q, nil
|
||||
}
|
||||
|
|
@@ -1,241 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var validMetricOfValues = map[MetricOfTable]struct{}{
|
||||
MetricOfTableBrowser: {},
|
||||
MetricOfTableDevice: {},
|
||||
MetricOfTableCountry: {},
|
||||
MetricOfTableUserId: {},
|
||||
MetricOfTableLocation: {},
|
||||
MetricOfTableReferrer: {},
|
||||
MetricOfTableFetch: {},
|
||||
}
|
||||
|
||||
type TableQueryBuilder struct{}
|
||||
|
||||
type TableValue struct {
|
||||
Name string `json:"name"`
|
||||
Total uint64 `json:"total"`
|
||||
}
|
||||
|
||||
type TableResponse struct {
|
||||
Total uint64 `json:"total"`
|
||||
Count uint64 `json:"count"`
|
||||
Values []TableValue `json:"values"`
|
||||
}
|
||||
|
||||
const (
|
||||
MetricFormatSessionCount = "sessionCount"
|
||||
MetricFormatUserCount = "userCount"
|
||||
nilUUIDString = "00000000-0000-0000-0000-000000000000"
|
||||
)
|
||||
|
||||
var propertySelectorMap = map[string]string{
|
||||
string(MetricOfTableLocation): "JSONExtractString(toString(main.$properties), 'url_path') AS metric_value",
|
||||
//string(MetricOfTableUserId): "if(empty(sessions.user_id), 'Anonymous', sessions.user_id) AS metric_value",
|
||||
string(MetricOfTableUserId): "if(empty(sessions.user_id) OR sessions.user_id IS NULL, 'Anonymous', sessions.user_id) AS metric_value",
|
||||
string(MetricOfTableBrowser): "main.$browser AS metric_value",
|
||||
//string(MetricOfTableDevice): "sessions.user_device AS metric_value",
|
||||
string(MetricOfTableDevice): "if(empty(sessions.user_device) OR sessions.user_device IS NULL, 'Undefined', sessions.user_device) AS metric_value",
|
||||
string(MetricOfTableCountry): "toString(sessions.user_country) AS metric_value",
|
||||
string(MetricOfTableReferrer): "main.$referrer AS metric_value",
|
||||
string(MetricOfTableFetch): "JSONExtractString(toString(main.$properties), 'url_path') AS metric_value",
|
||||
}
|
||||
|
||||
var mainColumns = map[string]string{
|
||||
"userBrowser": "$browser",
|
||||
"userDevice": "sessions.user_device",
|
||||
"referrer": "$referrer",
|
||||
"fetchDuration": "$duration_s",
|
||||
"ISSUE": "issue_type",
|
||||
}
|
||||
|
||||
func (t TableQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
if p.MetricOf == "" {
|
||||
return nil, fmt.Errorf("MetricOf is empty")
|
||||
}
|
||||
|
||||
if _, ok := validMetricOfValues[MetricOfTable(p.MetricOf)]; !ok {
|
||||
return nil, fmt.Errorf("invalid MetricOf value: %s", p.MetricOf)
|
||||
}
|
||||
|
||||
metricFormat := p.MetricFormat
|
||||
if metricFormat != MetricFormatSessionCount && metricFormat != MetricFormatUserCount {
|
||||
metricFormat = MetricFormatSessionCount
|
||||
}
|
||||
|
||||
query, err := t.buildQuery(p, metricFormat)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error building query: %w", err)
|
||||
}
|
||||
|
||||
rows, err := conn.Query(query)
|
||||
if err != nil {
|
||||
log.Printf("Error executing query: %s\nQuery: %s", err, query)
|
||||
return nil, fmt.Errorf("error executing query: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var overallTotalMetricValues uint64
|
||||
var overallCount uint64
|
||||
values := make([]TableValue, 0)
|
||||
firstRow := true
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
name string
|
||||
valueSpecificCount uint64
|
||||
tempOverallTotalMetricValues uint64
|
||||
tempOverallCount uint64
|
||||
)
|
||||
|
||||
if err := rows.Scan(&tempOverallTotalMetricValues, &name, &valueSpecificCount, &tempOverallCount); err != nil {
|
||||
return nil, fmt.Errorf("error scanning row: %w", err)
|
||||
}
|
||||
|
||||
if firstRow {
|
||||
overallTotalMetricValues = tempOverallTotalMetricValues
|
||||
overallCount = tempOverallCount
|
||||
firstRow = false
|
||||
}
|
||||
values = append(values, TableValue{Name: name, Total: valueSpecificCount})
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, fmt.Errorf("error iterating rows: %w", err)
|
||||
}
|
||||
|
||||
return &TableResponse{
|
||||
Total: overallTotalMetricValues,
|
||||
Count: overallCount,
|
||||
Values: values,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (t TableQueryBuilder) buildQuery(r Payload, metricFormat string) (string, error) {
|
||||
if len(r.Series) == 0 {
|
||||
return "", fmt.Errorf("payload Series cannot be empty")
|
||||
}
|
||||
s := r.Series[0]
|
||||
|
||||
// sessions_data WHERE conditions
|
||||
durConds, _ := buildDurationWhere(s.Filter.Filters)
|
||||
sessFilters, _ := filterOutTypes(s.Filter.Filters, []FilterType{FilterDuration, FilterUserAnonymousId})
|
||||
sessConds, evtNames := buildEventConditions(sessFilters, BuildConditionsOptions{DefinedColumns: mainColumns, MainTableAlias: "main"})
|
||||
sessionDataConds := append(durConds, sessConds...)
|
||||
// date range for sessions_data
|
||||
sessionDataConds = append(sessionDataConds,
|
||||
fmt.Sprintf("main.created_at BETWEEN toDateTime(%d/1000) AND toDateTime(%d/1000)", r.StartTimestamp, r.EndTimestamp),
|
||||
)
|
||||
// clean empty
|
||||
var sdClean []string
|
||||
for _, c := range sessionDataConds {
|
||||
if strings.TrimSpace(c) != "" {
|
||||
sdClean = append(sdClean, c)
|
||||
}
|
||||
}
|
||||
sessionDataWhere := ""
|
||||
if len(sdClean) > 0 {
|
||||
sessionDataWhere = "WHERE " + strings.Join(sdClean, " AND ")
|
||||
}
|
||||
if len(evtNames) > 0 {
|
||||
sessionDataWhere += fmt.Sprintf(" AND main.$event_name IN ('%s')", strings.Join(evtNames, "','"))
|
||||
}
|
||||
|
||||
// filtered_data WHERE conditions
|
||||
propSel, ok := propertySelectorMap[r.MetricOf]
|
||||
if !ok {
|
||||
propSel = fmt.Sprintf("JSONExtractString(toString(main.$properties), '%s') AS metric_value", r.MetricOf)
|
||||
}
|
||||
parts := strings.SplitN(propSel, " AS ", 2)
|
||||
propertyExpr := parts[0]
|
||||
|
||||
tAgg := "main.session_id"
|
||||
specConds := []string{}
|
||||
if metricFormat == MetricFormatUserCount {
|
||||
tAgg = "if(empty(sessions.user_id), toString(sessions.user_uuid), sessions.user_id)"
|
||||
specConds = append(specConds,
|
||||
fmt.Sprintf("NOT (empty(sessions.user_id) AND (sessions.user_uuid IS NULL OR sessions.user_uuid = '%s'))", nilUUIDString),
|
||||
)
|
||||
}
|
||||
|
||||
// metric-specific filter
|
||||
_, mFilt := filterOutTypes(s.Filter.Filters, []FilterType{FilterType(r.MetricOf)})
|
||||
metricCond := eventNameCondition("", r.MetricOf)
|
||||
if len(mFilt) > 0 {
|
||||
//conds, _ := buildEventConditions(mFilt, BuildConditionsOptions{DefinedColumns: map[string]string{"userId": "user_id"}, MainTableAlias: "main"})
|
||||
//metricCond = strings.Join(conds, " AND ")
|
||||
}
|
||||
|
||||
filteredConds := []string{
|
||||
fmt.Sprintf("main.project_id = %d", r.ProjectId),
|
||||
metricCond,
|
||||
fmt.Sprintf("main.created_at BETWEEN toDateTime(%d/1000) AND toDateTime(%d/1000)", r.StartTimestamp, r.EndTimestamp),
|
||||
}
|
||||
filteredConds = append(filteredConds, specConds...)
|
||||
// clean empty
|
||||
var fClean []string
|
||||
for _, c := range filteredConds {
|
||||
if strings.TrimSpace(c) != "" {
|
||||
fClean = append(fClean, c)
|
||||
}
|
||||
}
|
||||
filteredWhere := ""
|
||||
if len(fClean) > 0 {
|
||||
filteredWhere = "WHERE " + strings.Join(fClean, " AND ")
|
||||
}
|
||||
|
||||
limit := r.Limit
|
||||
if limit <= 0 {
|
||||
limit = 10
|
||||
}
|
||||
offset := (r.Page - 1) * limit
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
WITH sessions_data AS (
|
||||
SELECT session_id
|
||||
FROM product_analytics.events AS main
|
||||
JOIN experimental.sessions AS sessions USING (session_id)
|
||||
%s
|
||||
GROUP BY session_id
|
||||
),
|
||||
filtered_data AS (
|
||||
SELECT %s AS name, %s AS session_id
|
||||
FROM product_analytics.events AS main
|
||||
JOIN sessions_data USING (session_id)
|
||||
JOIN experimental.sessions AS sessions USING (session_id)
|
||||
%s
|
||||
),
|
||||
totals AS (
|
||||
SELECT count() AS overall_total_metric_values,
|
||||
countDistinct(session_id) AS overall_total_count
|
||||
FROM filtered_data
|
||||
),
|
||||
grouped_values AS (
|
||||
SELECT name,
|
||||
countDistinct(session_id) AS value_count
|
||||
FROM filtered_data
|
||||
GROUP BY name
|
||||
)
|
||||
SELECT t.overall_total_metric_values,
|
||||
g.name,
|
||||
g.value_count,
|
||||
t.overall_total_count
|
||||
FROM grouped_values AS g
|
||||
CROSS JOIN totals AS t
|
||||
ORDER BY g.value_count DESC
|
||||
LIMIT %d OFFSET %d;`,
|
||||
sessionDataWhere,
|
||||
propertyExpr,
|
||||
tAgg,
|
||||
filteredWhere,
|
||||
limit,
|
||||
offset,
|
||||
)
|
||||
return query, nil
|
||||
}
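// Example: Limit<=0 falls back to 10 and the offset is (Page-1)*Limit, so
// Page=3 with Limit=10 skips the first 20 grouped values; Page=0 would produce
// a negative offset, so callers are expected to pass Page>=1.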
|
||||
|
|
@@ -1,188 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
)
|
||||
|
||||
type TableErrorsQueryBuilder struct{}
|
||||
|
||||
type ErrorChartPoint struct {
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
Count uint64 `json:"count"`
|
||||
}
|
||||
|
||||
type ErrorItem struct {
|
||||
ErrorID string `json:"errorId"`
|
||||
Name string `json:"name"`
|
||||
Message string `json:"message"`
|
||||
Users uint64 `json:"users"`
|
||||
Total uint64 `json:"total"`
|
||||
Sessions uint64 `json:"sessions"`
|
||||
FirstOccurrence int64 `json:"firstOccurrence"`
|
||||
LastOccurrence int64 `json:"lastOccurrence"`
|
||||
Chart []ErrorChartPoint `json:"chart"`
|
||||
}
|
||||
|
||||
type TableErrorsResponse struct {
|
||||
Total uint64 `json:"total"`
|
||||
Errors []ErrorItem `json:"errors"`
|
||||
}
|
||||
|
||||
func (t TableErrorsQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
query, err := t.buildQuery(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rows, err := conn.Query(query)
|
||||
if err != nil {
|
||||
log.Printf("Error executing query: %s\nQuery: %s", err, query)
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var resp TableErrorsResponse
|
||||
for rows.Next() {
|
||||
var e ErrorItem
|
||||
var ts []int64
|
||||
var cs []uint64
|
||||
if err := rows.Scan(
|
||||
&e.ErrorID, &e.Name, &e.Message,
|
||||
&e.Users, &e.Total, &e.Sessions,
|
||||
&e.FirstOccurrence, &e.LastOccurrence,
|
||||
&ts, &cs,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i := range ts {
|
||||
e.Chart = append(e.Chart, ErrorChartPoint{Timestamp: ts[i], Count: cs[i]})
|
||||
}
|
||||
resp.Errors = append(resp.Errors, e)
|
||||
}
|
||||
resp.Total = uint64(len(resp.Errors))
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (t TableErrorsQueryBuilder) buildQuery(p Payload) (string, error) {
|
||||
if len(p.Series) == 0 {
|
||||
return "", fmt.Errorf("payload Series cannot be empty")
|
||||
}
|
||||
|
||||
density := p.Density
|
||||
if density < 2 {
|
||||
density = 7
|
||||
}
|
||||
durMs := p.EndTimestamp - p.StartTimestamp
|
||||
stepMs := durMs / int64(density-1)
|
||||
startMs := (p.StartTimestamp / 1000) * 1000
|
||||
endMs := (p.EndTimestamp / 1000) * 1000
|
||||
|
||||
limit := p.Limit
|
||||
if limit <= 0 {
|
||||
limit = 10
|
||||
}
|
||||
page := p.Page
|
||||
if page <= 0 {
|
||||
page = 1
|
||||
}
|
||||
offset := (page - 1) * limit
|
||||
|
||||
ef, en := buildEventConditions(
|
||||
p.Series[0].Filter.Filters,
|
||||
BuildConditionsOptions{DefinedColumns: mainColumns},
|
||||
)
|
||||
conds := []string{
|
||||
"`$event_name` = 'ERROR'",
|
||||
fmt.Sprintf("project_id = %d", p.ProjectId),
|
||||
fmt.Sprintf("created_at >= toDateTime(%d/1000)", startMs),
|
||||
fmt.Sprintf("created_at <= toDateTime(%d/1000)", endMs),
|
||||
}
|
||||
if len(ef) > 0 {
|
||||
conds = append(conds, ef...)
|
||||
}
|
||||
if len(en) > 0 {
|
||||
conds = append(conds, "`$event_name` IN ("+buildInClause(en)+")")
|
||||
}
|
||||
whereClause := strings.Join(conds, " AND ")
|
||||
|
||||
sql := fmt.Sprintf(`WITH
|
||||
events AS (
|
||||
SELECT
|
||||
error_id,
|
||||
JSONExtractString(toString("$properties"), 'name') AS name,
|
||||
JSONExtractString(toString("$properties"), 'message') AS message,
|
||||
distinct_id,
|
||||
session_id,
|
||||
created_at
|
||||
FROM product_analytics.events
|
||||
WHERE %s
|
||||
),
|
||||
sessions_per_interval AS (
|
||||
SELECT
|
||||
error_id,
|
||||
toUInt64(%d + (toUInt64((toUnixTimestamp64Milli(created_at) - %d) / %d) * %d)) AS bucket_ts,
|
||||
countDistinct(session_id) AS session_count
|
||||
FROM events
|
||||
GROUP BY error_id, bucket_ts
|
||||
),
|
||||
buckets AS (
|
||||
SELECT
|
||||
toUInt64(generate_series) AS bucket_ts
|
||||
FROM generate_series(
|
||||
%d,
|
||||
%d,
|
||||
%d
|
||||
)
|
||||
),
|
||||
error_meta AS (
|
||||
SELECT
|
||||
error_id,
|
||||
name,
|
||||
message,
|
||||
countDistinct(distinct_id) AS users,
|
||||
count() AS total,
|
||||
countDistinct(session_id) AS sessions,
|
||||
min(created_at) AS first_occurrence,
|
||||
max(created_at) AS last_occurrence
|
||||
FROM events
|
||||
GROUP BY error_id, name, message
|
||||
),
|
||||
error_chart AS (
|
||||
SELECT
|
||||
e.error_id AS error_id,
|
||||
groupArray(b.bucket_ts) AS timestamps,
|
||||
groupArray(coalesce(s.session_count, 0)) AS counts
|
||||
FROM (SELECT DISTINCT error_id FROM events) AS e
|
||||
CROSS JOIN buckets AS b
|
||||
LEFT JOIN sessions_per_interval AS s
|
||||
ON s.error_id = e.error_id
|
||||
AND s.bucket_ts = b.bucket_ts
|
||||
GROUP BY e.error_id
|
||||
)
|
||||
SELECT
|
||||
m.error_id,
|
||||
m.name,
|
||||
m.message,
|
||||
m.users,
|
||||
m.total,
|
||||
m.sessions,
|
||||
toUnixTimestamp64Milli(toDateTime64(m.first_occurrence, 3)) AS first_occurrence,
|
||||
toUnixTimestamp64Milli(toDateTime64(m.last_occurrence, 3)) AS last_occurrence,
|
||||
ec.timestamps,
|
||||
ec.counts
|
||||
FROM error_meta AS m
|
||||
LEFT JOIN error_chart AS ec
|
||||
ON m.error_id = ec.error_id
|
||||
ORDER BY m.last_occurrence DESC
|
||||
LIMIT %d OFFSET %d;`,
|
||||
whereClause,
|
||||
startMs, startMs, stepMs, stepMs, // New formula parameters
|
||||
startMs, endMs, stepMs,
|
||||
limit, offset,
|
||||
)
|
||||
|
||||
return sql, nil
|
||||
}
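// Example (illustrative only): with startMs=0 and stepMs=10000, an error created
// at 23456 ms lands in bucket_ts = 0 + floor(23456/10000)*10000 = 20000, while
// generate_series(startMs, endMs, stepMs) supplies the empty buckets so each error
// gets a chart point per interval.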
|
||||
|
|
@@ -1,147 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type TimeSeriesQueryBuilder struct{}
|
||||
|
||||
func (t TimeSeriesQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
data := make(map[uint64]map[string]uint64)
|
||||
for _, series := range p.Series {
|
||||
query, err := t.buildQuery(p, series)
|
||||
if err != nil {
|
||||
log.Printf("buildQuery %s: %v", series.Name, err)
|
||||
return nil, fmt.Errorf("series %s: %v", series.Name, err)
|
||||
}
|
||||
rows, err := conn.Query(query)
|
||||
if err != nil {
|
||||
log.Printf("exec %s: %v", series.Name, err)
|
||||
return nil, fmt.Errorf("series %s: %v", series.Name, err)
|
||||
}
|
||||
var pts []DataPoint
|
||||
for rows.Next() {
|
||||
var dp DataPoint
|
||||
if err := rows.Scan(&dp.Timestamp, &dp.Count); err != nil {
|
||||
rows.Close()
|
||||
return nil, err
|
||||
}
|
||||
pts = append(pts, dp)
|
||||
}
|
||||
rows.Close()
|
||||
|
||||
filled := FillMissingDataPoints(p.StartTimestamp, p.EndTimestamp, p.Density, DataPoint{}, pts, 1000)
|
||||
for _, dp := range filled {
|
||||
if data[dp.Timestamp] == nil {
|
||||
data[dp.Timestamp] = map[string]uint64{}
|
||||
}
|
||||
data[dp.Timestamp][series.Name] = dp.Count
|
||||
}
|
||||
}
|
||||
|
||||
var timestamps []uint64
|
||||
for ts := range data {
|
||||
timestamps = append(timestamps, ts)
|
||||
}
|
||||
sort.Slice(timestamps, func(i, j int) bool { return timestamps[i] < timestamps[j] })
|
||||
|
||||
var result []map[string]interface{}
|
||||
for _, ts := range timestamps {
|
||||
row := map[string]interface{}{"timestamp": ts}
|
||||
for _, series := range p.Series {
|
||||
row[series.Name] = data[ts][series.Name]
|
||||
}
|
||||
result = append(result, row)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (t TimeSeriesQueryBuilder) buildQuery(p Payload, s Series) (string, error) {
|
||||
switch p.MetricOf {
|
||||
case "sessionCount":
|
||||
return t.buildTimeSeriesQuery(p, s, "sessionCount", "session_id"), nil
|
||||
case "userCount":
|
||||
return t.buildTimeSeriesQuery(p, s, "userCount", "user_id"), nil
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported metric %q", p.MetricOf)
|
||||
}
|
||||
}
|
||||
|
||||
func (t TimeSeriesQueryBuilder) buildTimeSeriesQuery(p Payload, s Series, metric, idField string) string {
|
||||
sub := t.buildSubQuery(p, s, metric)
|
||||
step := int(getStepSize(p.StartTimestamp, p.EndTimestamp, p.Density, false, 1000)) * 1000
|
||||
|
||||
return fmt.Sprintf(
|
||||
"SELECT gs.generate_series AS timestamp, COALESCE(COUNT(DISTINCT ps.%s),0) AS count "+
|
||||
"FROM generate_series(%d,%d,%d) AS gs "+
|
||||
"LEFT JOIN (%s) AS ps ON TRUE "+
|
||||
"WHERE ps.datetime >= toDateTime(timestamp/1000) AND ps.datetime < toDateTime((timestamp+%d)/1000) "+
|
||||
"GROUP BY timestamp ORDER BY timestamp;",
|
||||
idField, p.StartTimestamp, p.EndTimestamp, step, sub, step,
|
||||
)
|
||||
}
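// Note: the generated query pairs every bucket from generate_series with the
// sub-query rows (LEFT JOIN ... ON TRUE) and then keeps only the rows whose
// datetime falls inside the bucket, counting distinct session or user ids per
// bucket.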
|
||||
|
||||
func (t TimeSeriesQueryBuilder) buildSubQuery(p Payload, s Series, metric string) string {
|
||||
evConds, evNames := buildEventConditions(s.Filter.Filters, BuildConditionsOptions{
|
||||
DefinedColumns: mainColumns,
|
||||
MainTableAlias: "main",
|
||||
PropertiesColumnName: "$properties",
|
||||
})
|
||||
sessConds := buildSessionConditions(s.Filter.Filters)
|
||||
staticEvt := buildStaticEventWhere(p)
|
||||
sessWhere, sessJoin := buildStaticSessionWhere(p, sessConds)
|
||||
|
||||
if len(evConds) == 0 && len(evNames) == 0 {
|
||||
if metric == "sessionCount" {
|
||||
return fmt.Sprintf(
|
||||
"SELECT s.session_id AS session_id, s.datetime AS datetime "+
|
||||
"FROM experimental.sessions AS s WHERE %s",
|
||||
sessJoin,
|
||||
)
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"SELECT multiIf(s.user_id!='',s.user_id,s.user_anonymous_id!='',s.user_anonymous_id,toString(s.user_uuid)) AS user_id, s.datetime AS datetime "+
|
||||
"FROM experimental.sessions AS s WHERE %s",
|
||||
sessJoin,
|
||||
)
|
||||
}
|
||||
|
||||
uniq := make([]string, 0, len(evNames))
|
||||
for _, name := range evNames {
|
||||
if !contains(uniq, name) {
|
||||
uniq = append(uniq, name)
|
||||
}
|
||||
}
|
||||
nameClause := ""
|
||||
if len(uniq) > 0 {
|
||||
nameClause = fmt.Sprintf("AND main.`$event_name` IN (%s) ", buildInClause(uniq))
|
||||
}
|
||||
|
||||
having := ""
|
||||
if len(evConds) > 0 {
|
||||
having = buildHavingClause(evConds)
|
||||
}
|
||||
|
||||
whereEvt := staticEvt
|
||||
if len(evConds) > 0 {
|
||||
whereEvt += " AND " + strings.Join(evConds, " AND ")
|
||||
}
|
||||
|
||||
proj := map[string]string{
|
||||
"sessionCount": "s.session_id AS session_id",
|
||||
"userCount": "multiIf(s.user_id!='',s.user_id,s.user_anonymous_id!='',s.user_anonymous_id,toString(s.user_uuid)) AS user_id",
|
||||
}[metric] + ", s.datetime AS datetime"
|
||||
|
||||
return fmt.Sprintf(
|
||||
"SELECT %s FROM (SELECT main.session_id, MIN(main.created_at) AS first_event_ts, MAX(main.created_at) AS last_event_ts "+
|
||||
"FROM product_analytics.events AS main "+
|
||||
"WHERE %s AND main.session_id IN (SELECT s.session_id FROM experimental.sessions AS s WHERE %s) %s "+
|
||||
"GROUP BY main.session_id %s "+
|
||||
"INNER JOIN (SELECT * FROM experimental.sessions AS s WHERE %s) AS s ON s.session_id=f.session_id",
|
||||
proj, whereEvt, sessWhere, nameClause, having, sessJoin,
|
||||
)
|
||||
}
|
||||
|
|
@@ -1,764 +0,0 @@
|
|||
package charts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"openreplay/backend/pkg/analytics/db"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Node represents a point in the journey diagram.
|
||||
type Node struct {
|
||||
Depth int `json:"depth"`
|
||||
Name string `json:"name"`
|
||||
EventType string `json:"eventType"`
|
||||
ID int `json:"id"`
|
||||
StartingNode bool `json:"startingNode"`
|
||||
}
|
||||
|
||||
// Link represents a transition between nodes.
|
||||
type Link struct {
|
||||
EventType string `json:"eventType"`
|
||||
SessionsCount int `json:"sessionsCount"`
|
||||
Value float64 `json:"value"`
|
||||
Source int `json:"source"`
|
||||
Target int `json:"target"`
|
||||
}
|
||||
|
||||
// JourneyData holds all nodes and links for the response.
|
||||
type JourneyData struct {
|
||||
Nodes []Node `json:"nodes"`
|
||||
Links []Link `json:"links"`
|
||||
}
|
||||
|
||||
// JourneyResponse is the API response structure.
|
||||
type JourneyResponse struct {
|
||||
Data JourneyData `json:"data"`
|
||||
}
|
||||
|
||||
// UserJourneyQueryBuilder builds and executes the journey query.
|
||||
type UserJourneyQueryBuilder struct{}
|
||||
|
||||
func (h UserJourneyQueryBuilder) Execute(p Payload, conn db.Connector) (interface{}, error) {
|
||||
q, err := h.buildQuery(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rows, err := conn.Query(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
type row struct {
|
||||
Stage int64
|
||||
CurrentEventName string
|
||||
CurrentEventProperty string
|
||||
PrevEventName string
|
||||
PrevEventProperty string
|
||||
SessionsCount uint64
|
||||
}
|
||||
|
||||
// Parse all rows into a slice
|
||||
var rawData []row
|
||||
for rows.Next() {
|
||||
var r row
|
||||
if err := rows.Scan(
|
||||
&r.Stage,
|
||||
&r.CurrentEventName,
|
||||
&r.CurrentEventProperty,
|
||||
&r.PrevEventName,
|
||||
&r.PrevEventProperty,
|
||||
&r.SessionsCount,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if r.SessionsCount == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
rawData = append(rawData, r)
|
||||
}
|
||||
|
||||
// Group data by stage
|
||||
dataByStage := make(map[int64][]row)
|
||||
var minStage int64 = 0
|
||||
var maxStage int64 = 0
|
||||
|
||||
for _, r := range rawData {
|
||||
dataByStage[r.Stage] = append(dataByStage[r.Stage], r)
|
||||
if r.Stage > maxStage {
|
||||
maxStage = r.Stage
|
||||
}
|
||||
if r.Stage < minStage {
|
||||
minStage = r.Stage
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate total sessions per stage
|
||||
stageTotals := make(map[int64]uint64)
|
||||
for stage, stageRows := range dataByStage {
|
||||
for _, r := range stageRows {
|
||||
stageTotals[stage] += r.SessionsCount
|
||||
}
|
||||
}
|
||||
|
||||
// Determine base count for percentage calculations
|
||||
// We'll use the starting point (usually stage 1) as our base
|
||||
var baseSessionsCount uint64
|
||||
if count, exists := stageTotals[1]; exists {
|
||||
baseSessionsCount = count
|
||||
} else {
|
||||
// If stage 1 doesn't exist, use the first available positive stage
|
||||
for stage := int64(0); stage <= maxStage; stage++ {
|
||||
if count, exists := stageTotals[stage]; exists {
|
||||
baseSessionsCount = count
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if baseSessionsCount == 0 {
|
||||
baseSessionsCount = 1 // Prevent division by zero
|
||||
}
|
||||
|
||||
// Number of top nodes to display per stage
|
||||
topLimit := int(p.Rows)
|
||||
if topLimit <= 0 {
|
||||
topLimit = 5 // Default if not specified
|
||||
}
|
||||
|
||||
// Step 1: Determine the top paths at each stage based on destination
|
||||
type pathKey struct {
|
||||
eventName string
|
||||
eventProp string
|
||||
}
|
||||
|
||||
// Map to store top paths for each stage
|
||||
topPathsByStage := make(map[int64]map[pathKey]bool)
|
||||
pathCountsByStage := make(map[int64]map[pathKey]uint64)
|
||||
|
||||
for stage := minStage; stage <= maxStage; stage++ {
|
||||
// Skip if this stage has no data
|
||||
if _, exists := dataByStage[stage]; !exists {
|
||||
continue
|
||||
}
|
||||
|
||||
// Sort rows within each stage by session count (descending)
|
||||
sort.Slice(dataByStage[stage], func(i, j int) bool {
|
||||
return dataByStage[stage][i].SessionsCount > dataByStage[stage][j].SessionsCount
|
||||
})
|
||||
|
||||
// Initialize maps for this stage
|
||||
topPathsByStage[stage] = make(map[pathKey]bool)
|
||||
pathCountsByStage[stage] = make(map[pathKey]uint64)
|
||||
|
||||
// First, aggregate by path to get total sessions per path
|
||||
for _, r := range dataByStage[stage] {
|
||||
key := pathKey{eventName: r.CurrentEventName, eventProp: r.CurrentEventProperty}
|
||||
pathCountsByStage[stage][key] += r.SessionsCount
|
||||
}
|
||||
|
||||
// Then sort paths by session count
|
||||
type pathCount struct {
|
||||
path pathKey
|
||||
count uint64
|
||||
}
|
||||
|
||||
var paths []pathCount
|
||||
for path, count := range pathCountsByStage[stage] {
|
||||
paths = append(paths, pathCount{path: path, count: count})
|
||||
}
|
||||
|
||||
// Sort descending by count
|
||||
sort.Slice(paths, func(i, j int) bool {
|
||||
return paths[i].count > paths[j].count
|
||||
})
|
||||
|
||||
// Mark top paths - take exactly topLimit or all if fewer available
|
||||
for i, pc := range paths {
|
||||
if i < topLimit {
|
||||
topPathsByStage[stage][pc.path] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 2: Create a normalized sequential depth mapping
|
||||
// First, gather all stages that have data
|
||||
var stagesWithData []int64
|
||||
for stage := range dataByStage {
|
||||
stagesWithData = append(stagesWithData, stage)
|
||||
}
|
||||
|
||||
// Sort stages
|
||||
sort.Slice(stagesWithData, func(i, j int) bool {
|
||||
return stagesWithData[i] < stagesWithData[j]
|
||||
})
|
||||
|
||||
var startingStage int64
|
||||
for _, s := range stagesWithData {
|
||||
if s > 0 {
|
||||
startingStage = s
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Create a mapping from logical stage to display depth (ensuring no gaps)
|
||||
stageToDepth := make(map[int64]int)
|
||||
for i, stage := range stagesWithData {
|
||||
stageToDepth[stage] = i
|
||||
}
|
||||
|
||||
// Determine depth of central node (stage 1 or equivalent)
|
||||
var centralDepth int
|
||||
if depth, exists := stageToDepth[1]; exists {
|
||||
centralDepth = depth
|
||||
} else {
|
||||
// If stage 1 doesn't exist, use the first positive stage
|
||||
for _, stage := range stagesWithData {
|
||||
if stage > 0 {
|
||||
centralDepth = stageToDepth[stage]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 3: Create nodes with normalized depths
|
||||
var nodes []Node
|
||||
var links []Link
|
||||
nodeID := 0
|
||||
|
||||
// Maps to track nodes and sessions
|
||||
nodeMap := make(map[string]int) // Stage|EventName|EventProp → nodeID
|
||||
othersNodes := make(map[int64]int) // stage → "Others" nodeID
|
||||
dropNodes := make(map[int64]int) // stage → "Drop" nodeID
|
||||
|
||||
incomingSessions := make(map[int]uint64) // nodeID → incoming sessions
|
||||
outgoingSessions := make(map[int]uint64) // nodeID → outgoing sessions
|
||||
|
||||
// Create all nodes using normalized depths
|
||||
for _, stage := range stagesWithData {
|
||||
displayDepth := stageToDepth[stage]
|
||||
|
||||
// Create regular nodes for top paths
|
||||
for path := range topPathsByStage[stage] {
|
||||
nodeKey := fmt.Sprintf("%d|%s|%s", stage, path.eventName, path.eventProp)
|
||||
nodeMap[nodeKey] = nodeID
|
||||
|
||||
nodes = append(nodes, Node{
|
||||
ID: nodeID,
|
||||
Depth: displayDepth,
|
||||
Name: path.eventProp,
|
||||
EventType: path.eventName,
|
||||
StartingNode: stage == startingStage,
|
||||
})
|
||||
|
||||
// For the central stage (usually stage 1) or first stage, set incoming sessions
|
||||
if (stage == 1) || (stage == minStage && minStage != 1) {
|
||||
incomingSessions[nodeID] = pathCountsByStage[stage][path]
|
||||
}
|
||||
|
||||
nodeID++
|
||||
}
|
||||
|
||||
// Calculate if we need an "Others" node (when total paths > topLimit)
|
||||
totalPaths := len(pathCountsByStage[stage])
|
||||
if totalPaths > topLimit {
|
||||
// Calculate sessions that will go to Others
|
||||
othersCount := uint64(0)
|
||||
for path, count := range pathCountsByStage[stage] {
|
||||
if !topPathsByStage[stage][path] {
|
||||
othersCount += count
|
||||
}
|
||||
}
|
||||
|
||||
// Only create Others if it has sessions
|
||||
if othersCount > 0 {
|
||||
othersNodes[stage] = nodeID
|
||||
|
||||
nodes = append(nodes, Node{
|
||||
ID: nodeID,
|
||||
Depth: displayDepth,
|
||||
Name: "other",
|
||||
EventType: "OTHER",
|
||||
StartingNode: stage == startingStage,
|
||||
})
|
||||
|
||||
// For the central stage or first stage, set incoming sessions for Others
|
||||
if (stage == 1) || (stage == minStage && minStage != 1) {
|
||||
incomingSessions[nodeID] = othersCount
|
||||
}
|
||||
|
||||
nodeID++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 4: Create links between adjacent nodes only
|
||||
// Use a map to deduplicate links
|
||||
type linkKey struct {
|
||||
src int
|
||||
tgt int
|
||||
}
|
||||
linkSessions := make(map[linkKey]uint64)
|
||||
linkTypes := make(map[linkKey]string)
|
||||
|
||||
// For each stage (except the first), create links from the previous stage
|
||||
for i := 1; i < len(stagesWithData); i++ {
|
||||
currentStage := stagesWithData[i]
|
||||
prevStage := stagesWithData[i-1]
|
||||
|
||||
for _, r := range dataByStage[currentStage] {
|
||||
// Skip if previous stage doesn't match expected
|
||||
if r.Stage != currentStage {
|
||||
continue
|
||||
}
|
||||
|
||||
// Determine source node
|
||||
prevPathKey := fmt.Sprintf("%d|%s|%s", prevStage, r.PrevEventName, r.PrevEventProperty)
|
||||
srcID, hasSrc := nodeMap[prevPathKey]
|
||||
|
||||
if !hasSrc {
|
||||
// If source isn't a top node, use Others from previous stage
|
||||
if othersID, hasOthers := othersNodes[prevStage]; hasOthers {
|
||||
srcID = othersID
|
||||
hasSrc = true
|
||||
} else {
|
||||
// Skip if we can't find a source
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Determine target node
|
||||
curPath := pathKey{eventName: r.CurrentEventName, eventProp: r.CurrentEventProperty}
|
||||
var tgtID int
|
||||
var hasTgt bool
|
||||
|
||||
// Check if this path is in the top paths for this stage
|
||||
if topPathsByStage[currentStage][curPath] {
|
||||
// It's a top node
|
||||
curPathKey := fmt.Sprintf("%d|%s|%s", currentStage, r.CurrentEventName, r.CurrentEventProperty)
|
||||
tgtID = nodeMap[curPathKey]
|
||||
hasTgt = true
|
||||
} else {
|
||||
// It's part of Others
|
||||
if othersID, hasOthers := othersNodes[currentStage]; hasOthers {
|
||||
tgtID = othersID
|
||||
hasTgt = true
|
||||
}
|
||||
}
|
||||
|
||||
if !hasSrc || !hasTgt {
|
||||
continue
|
||||
}
|
||||
|
||||
// Update session tracking
|
||||
incomingSessions[tgtID] += r.SessionsCount
|
||||
outgoingSessions[srcID] += r.SessionsCount
|
||||
|
||||
// Record link (deduplicating)
|
||||
lk := linkKey{src: srcID, tgt: tgtID}
|
||||
linkSessions[lk] += r.SessionsCount
|
||||
|
||||
// Prefer non-OTHER event type
|
||||
if linkTypes[lk] == "" || linkTypes[lk] == "OTHER" {
|
||||
linkTypes[lk] = r.CurrentEventName
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create deduplicated links with proper percentages
|
||||
for lk, count := range linkSessions {
|
||||
// Calculate percentage based on baseSessionsCount
|
||||
percent := math.Round(float64(count)*10000/float64(baseSessionsCount)) / 100
|
||||
|
||||
links = append(links, Link{
|
||||
Source: lk.src,
|
||||
Target: lk.tgt,
|
||||
SessionsCount: int(count),
|
||||
Value: percent,
|
||||
EventType: linkTypes[lk],
|
||||
})
|
||||
}
|
||||
|
||||
// Step 5: Calculate drops and create drop nodes (only for stages ≥ 0)
|
||||
// Process forward drops (positive stages only)
|
||||
for i := 0; i < len(stagesWithData)-1; i++ {
|
||||
stage := stagesWithData[i]
|
||||
|
||||
// Skip negative stages for drops
|
||||
if stage < 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Calculate new drops at this stage
|
||||
stageDrops := uint64(0)
|
||||
dropsFromNode := make(map[int]uint64) // nodeID -> drop count
|
||||
|
||||
for _, node := range nodes {
|
||||
nodeDepth := node.Depth
|
||||
|
||||
				// Skip if this node isn't in the current stage
				if nodeDepth != stageToDepth[stage] {
					continue
				}

				incoming := incomingSessions[node.ID]
				outgoing := outgoingSessions[node.ID]

				if incoming > outgoing {
					dropCount := incoming - outgoing
					dropsFromNode[node.ID] = dropCount
					stageDrops += dropCount
				}
			}

			// Skip if no drops
			if stageDrops == 0 {
				continue
			}

			// Determine next stage depth for drop node positioning
			var dropDepth int
			if i+1 < len(stagesWithData) {
				dropDepth = stageToDepth[stagesWithData[i+1]]
			} else {
				dropDepth = stageToDepth[stage] + 1
			}

			// Create drop node
			dropNodes[stage] = nodeID

			nodes = append(nodes, Node{
				ID:        nodeID,
				Depth:     dropDepth,
				Name:      "drop",
				EventType: "DROP",
			})

			// Create links from nodes with drops to the drop node
			for nid, dropCount := range dropsFromNode {
				if dropCount == 0 {
					continue
				}

				// Calculate percentage based on baseSessionsCount
				percent := math.Round(float64(dropCount)*10000/float64(baseSessionsCount)) / 100

				links = append(links, Link{
					Source:        nid,
					Target:        nodeID,
					SessionsCount: int(dropCount),
					Value:         percent,
					EventType:     "DROP",
				})
			}

			// Link previous drop node to current drop node to show accumulation
			if i > 0 {
				for j := i - 1; j >= 0; j-- {
					prevStage := stagesWithData[j]
					if prevDropID, hasPrevDrop := dropNodes[prevStage]; hasPrevDrop {
						// Link previous drop to current drop to show accumulation
						prevDropCount := uint64(0)
						for _, link := range links {
							if link.Target == prevDropID && link.EventType == "DROP" {
								prevDropCount += uint64(link.SessionsCount)
							}
						}

						percent := math.Round(float64(prevDropCount)*10000/float64(baseSessionsCount)) / 100

						links = append(links, Link{
							Source:        prevDropID,
							Target:        nodeID,
							SessionsCount: int(prevDropCount),
							Value:         percent,
							EventType:     "DROP",
						})
						break
					}
				}
			}

			nodeID++
		}

		// Filter out nodes with no connections
		nodeHasConnection := make(map[int]bool)
		for _, link := range links {
			nodeHasConnection[link.Source] = true
			nodeHasConnection[link.Target] = true
		}

		// Make sure central nodes are included even if they don't have links
		for _, node := range nodes {
			if node.Depth == centralDepth {
				nodeHasConnection[node.ID] = true
			}
		}

		var filteredNodes []Node
		for _, node := range nodes {
			if nodeHasConnection[node.ID] {
				filteredNodes = append(filteredNodes, node)
			}
		}

		// Reassign IDs to be sequential
		nodeIDMap := make(map[int]int)
		var finalNodes []Node = make([]Node, 0, len(filteredNodes))

		for newID, node := range filteredNodes {
			nodeIDMap[node.ID] = newID
			node.ID = newID
			finalNodes = append(finalNodes, node)
		}

		// Update link references
		var finalLinks []Link = make([]Link, 0, len(links))
		for _, link := range links {
			srcID, srcExists := nodeIDMap[link.Source]
			tgtID, tgtExists := nodeIDMap[link.Target]

			if srcExists && tgtExists {
				link.Source = srcID
				link.Target = tgtID
				finalLinks = append(finalLinks, link)
			}
		}

		return JourneyData{
			Nodes: finalNodes,
			Links: finalLinks,
		}, nil
	}

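// Illustration (added for readability, not part of the original change): with a
// base of 100 sessions, a first stage that 60 sessions leave via a CLICK and 40
// abandon, the builder above would emit roughly the following shape. The names,
// counts and EventType values are made up for the example; the field names match
// the literals used above.
//
//	JourneyData{
//		Nodes: []Node{
//			{ID: 0, Depth: 0, Name: "/home", EventType: "LOCATION"},
//			{ID: 1, Depth: 1, Name: "checkout", EventType: "CLICK"},
//			{ID: 2, Depth: 1, Name: "drop", EventType: "DROP"},
//		},
//		Links: []Link{
//			{Source: 0, Target: 1, SessionsCount: 60, Value: 60, EventType: "CLICK"},
//			{Source: 0, Target: 2, SessionsCount: 40, Value: 40, EventType: "DROP"},
//		},
//	}
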
func (h UserJourneyQueryBuilder) buildQuery(p Payload) (string, error) {
	// prepare event list filter
	events := p.MetricValue
	if len(events) == 0 {
		events = []string{"LOCATION"}
	}
	vals := make([]string, len(events))
	for i, v := range events {
		vals[i] = fmt.Sprintf("'%s'", v)
	}
	laterCond := fmt.Sprintf("e.\"$event_name\" IN (%s)", strings.Join(vals, ","))

	// build start and exclude conditions
	startConds, _ := buildEventConditions(p.StartPoint, BuildConditionsOptions{DefinedColumns: mainColumns, MainTableAlias: "e"})
	excludeConds, _ := buildEventConditions(p.Exclude, BuildConditionsOptions{DefinedColumns: mainColumns, MainTableAlias: "e"})

	// quote properties column correctly
	fixProps := func(conds []string) []string {
		for i, c := range conds {
			conds[i] = strings.ReplaceAll(c, "e.$properties", "e.\"$properties\"")
		}
		return conds
	}
	startConds = fixProps(startConds)
	excludeConds = fixProps(excludeConds)

	// extract global filters and duration from first series
	s := p.MetricPayload.Series[0]
	var durationMin, durationMax int64
	var okMin, okMax bool
	var err error
	var globalFilters []Filter
	for _, flt := range s.Filter.Filters {
		if flt.Type == "duration" {
			if len(flt.Value) > 0 && flt.Value[0] != "" {
				durationMin, err = strconv.ParseInt(flt.Value[0], 10, 64)
				if err != nil {
					return "", err
				}
				okMin = true
			}
			if len(flt.Value) > 1 && flt.Value[1] != "" {
				durationMax, err = strconv.ParseInt(flt.Value[1], 10, 64)
				if err != nil {
					return "", err
				}
				okMax = true
			}
			continue
		}
		if flt.IsEvent {
			continue
		}
		globalFilters = append(globalFilters, flt)
	}
	globalConds, _ := buildEventConditions(globalFilters, BuildConditionsOptions{DefinedColumns: mainColumns, MainTableAlias: "e"})
	globalConds = fixProps(globalConds)

	// assemble duration condition
	var durCond string
	if okMin && okMax {
		durCond = fmt.Sprintf("ss.duration BETWEEN %d AND %d", durationMin, durationMax)
	} else if okMin {
		durCond = fmt.Sprintf("ss.duration >= %d", durationMin)
	} else if okMax {
		durCond = fmt.Sprintf("ss.duration <= %d", durationMax)
	}

	// determine starting event
	var startEvent string
	if len(p.StartPoint) > 0 {
		startEvent = string(p.StartPoint[0].Type)
	} else {
		startEvent = events[0]
	}

	// assemble first_hits WHERE clause with optional duration
	firstBase := []string{fmt.Sprintf("e.\"$event_name\" = '%s'", startEvent)}
	if len(startConds) > 0 {
		firstBase = append(firstBase, startConds...)
	}
	if len(globalConds) > 0 {
		firstBase = append(firstBase, globalConds...)
	}
	firstBase = append(firstBase,
		fmt.Sprintf("e.project_id = %d", p.ProjectId),
		"e.session_id IS NOT NULL",
		fmt.Sprintf("e.created_at BETWEEN toDateTime('%s') AND toDateTime('%s')",
			time.Unix(p.StartTimestamp/1000, 0).UTC().Format("2006-01-02 15:04:05"),
			time.Unix(p.EndTimestamp/1000, 0).UTC().Format("2006-01-02 15:04:05"),
		),
	)
	if durCond != "" {
		firstBase = append(firstBase, durCond)
	}

	// assemble journey WHERE clause
	journeyBase := []string{laterCond}
	if len(excludeConds) > 0 {
		journeyBase = append(journeyBase, "NOT ("+strings.Join(excludeConds, " AND ")+")")
	}
	if len(globalConds) > 0 {
		journeyBase = append(journeyBase, globalConds...)
	}
	journeyBase = append(journeyBase,
		fmt.Sprintf("e.project_id = %d", p.ProjectId),
	)

	// format time bounds
	startTime := time.Unix(p.StartTimestamp/1000, 0).UTC().Format("2006-01-02 15:04:05")
	endTime := time.Unix(p.EndTimestamp/1000, 0).UTC().Format("2006-01-02 15:04:05")

	// set column limits
	previousColumns := p.PreviousColumns
	if previousColumns <= 0 {
		previousColumns = 0
	}
	maxCols := p.Columns
	if maxCols > 0 {
		maxCols++
	}

	// build final query
	q := fmt.Sprintf(`WITH
first_hits AS (
    SELECT e.session_id, MIN(e.created_at) AS start_time
    FROM product_analytics.events AS e
    JOIN experimental.sessions AS ss USING(session_id)
    WHERE %s
    GROUP BY e.session_id
),
journey_events_after AS (
    SELECT
        e.session_id,
        e.distinct_id,
        e."$event_name" AS event_name,
        e.created_at,
        CASE
            WHEN e."$event_name" = 'LOCATION' THEN JSONExtractString(toString(e."$properties"), 'url_path')
            WHEN e."$event_name" = 'CLICK' THEN JSONExtractString(toString(e."$properties"), 'label')
            WHEN e."$event_name" = 'INPUT' THEN JSONExtractString(toString(e."$properties"), 'label')
            ELSE NULL
        END AS event_property
    FROM product_analytics.events AS e
    JOIN first_hits AS f USING(session_id)
    WHERE
        e.created_at >= f.start_time
        AND e.created_at <= toDateTime('%s')
        AND %s
),
journey_events_before AS (
    SELECT
        e.session_id,
        e.distinct_id,
        e."$event_name" AS event_name,
        e.created_at,
        CASE
            WHEN e."$event_name" = 'LOCATION' THEN JSONExtractString(toString(e."$properties"), 'url_path')
            WHEN e."$event_name" = 'CLICK' THEN JSONExtractString(toString(e."$properties"), 'label')
            WHEN e."$event_name" = 'INPUT' THEN JSONExtractString(toString(e."$properties"), 'label')
            ELSE NULL
        END AS event_property
    FROM product_analytics.events AS e
    JOIN first_hits AS f USING(session_id)
    WHERE
        e.created_at < f.start_time
        AND e.created_at >= toDateTime('%s')
        AND %s
        AND %d > 0
),
journey_events_combined AS (
    SELECT *, 1 AS direction FROM journey_events_after
    UNION ALL
    SELECT *, -1 AS direction FROM journey_events_before
),
event_with_prev AS (
    SELECT
        session_id,
        distinct_id,
        event_name,
        event_property,
        created_at,
        direction,
        any(event_name) OVER (PARTITION BY session_id ORDER BY created_at ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS previous_event_name,
        any(event_property) OVER (PARTITION BY session_id ORDER BY created_at ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS previous_event_property
    FROM journey_events_combined
),
staged AS (
    SELECT
        *,
        CASE
            WHEN direction = 1 THEN toInt64(sumIf(1, true) OVER (PARTITION BY session_id, direction ORDER BY created_at ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW))
            WHEN direction = -1 THEN -1 * toInt64(sumIf(1, true) OVER (PARTITION BY session_id, direction ORDER BY created_at DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW))
            ELSE 0
        END AS stage
    FROM event_with_prev
)
SELECT
    stage AS stage,
    event_name AS current_event_name,
    event_property AS current_event_property,
    COALESCE(previous_event_name, '') AS previous_event_name,
    COALESCE(previous_event_property, '') AS previous_event_property,
    COUNT(DISTINCT session_id) AS sessions_count
FROM staged
WHERE stage <= %d AND stage >= -%d
GROUP BY
    stage,
    event_name,
    event_property,
    previous_event_name,
    previous_event_property
ORDER BY stage, COUNT(DISTINCT session_id) DESC;`,
		strings.Join(firstBase, " AND "),
		endTime,
		strings.Join(journeyBase, " AND "),
		startTime,
		strings.Join(journeyBase, " AND "),
		previousColumns,
		maxCols,
		previousColumns,
	)
	return q, nil
}

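// Illustration (added for readability, not part of the original change): a minimal
// sketch of driving this builder from within the package. The concrete values are
// assumptions for the example, not values taken from this diff.
//
//	p := Payload{
//		MetricPayload: &MetricPayload{
//			MetricType:     MetricUserJourney,
//			MetricValue:    []string{"LOCATION", "CLICK"},
//			StartTimestamp: 1737043724664,
//			EndTimestamp:   1737130124664,
//			Series:         []Series{{Name: "Series 1"}},
//			Columns:        5,
//		},
//		ProjectId: 1,
//	}
//	sql, err := UserJourneyQueryBuilder{}.buildQuery(p)
//	// sql now holds the WITH ... SELECT statement assembled above.
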
@ -1,184 +1,21 @@
|
|||
package charts
|
||||
|
||||
type Table string
|
||||
type Column string
|
||||
type MetricType string
|
||||
type FilterType string
|
||||
type EventType string
|
||||
type EventOrder string
|
||||
|
||||
const (
|
||||
TableEvents Table = "product_analytics.events"
|
||||
TableSessions Table = "experimental.sessions"
|
||||
)
|
||||
|
||||
const (
|
||||
ColEventTime Column = "main.created_at"
|
||||
ColEventName Column = "main.`$event_name`"
|
||||
ColEventProjectID Column = "main.project_id"
|
||||
ColEventProperties Column = "main.`$properties`"
|
||||
ColEventSessionID Column = "main.session_id"
|
||||
ColEventURLPath Column = "main.url_path"
|
||||
ColEventStatus Column = "main.status"
|
||||
)
|
||||
|
||||
const (
|
||||
ColSessionID Column = "s.session_id"
|
||||
ColDuration Column = "s.duration"
|
||||
ColUserCountry Column = "s.user_country"
|
||||
ColUserCity Column = "s.user_city"
|
||||
ColUserState Column = "s.user_state"
|
||||
ColUserID Column = "s.user_id"
|
||||
ColUserAnonymousID Column = "s.user_anonymous_id"
|
||||
ColUserOS Column = "s.user_os"
|
||||
ColUserBrowser Column = "s.user_browser"
|
||||
ColUserDevice Column = "s.user_device"
|
||||
ColUserDeviceType Column = "s.user_device_type"
|
||||
ColRevID Column = "s.rev_id"
|
||||
ColBaseReferrer Column = "s.base_referrer"
|
||||
ColUtmSource Column = "s.utm_source"
|
||||
ColUtmMedium Column = "s.utm_medium"
|
||||
ColUtmCampaign Column = "s.utm_campaign"
|
||||
ColMetadata1 Column = "s.metadata_1"
|
||||
ColSessionProjectID Column = "s.project_id"
|
||||
ColSessionIsNotNull Column = "isNotNull(s.duration)"
|
||||
)
|
||||
|
||||
const (
|
||||
MetricTypeTimeseries MetricType = "timeseries"
|
||||
MetricTypeTable MetricType = "table"
|
||||
MetricTypeFunnel MetricType = "funnel"
|
||||
MetricTypeHeatmap MetricType = "heatMap"
|
||||
MetricTypeSession MetricType = "heatmaps_session"
|
||||
MetricUserJourney MetricType = "pathAnalysis"
|
||||
)
|
||||
|
||||
const (
|
||||
EventOrderThen EventOrder = "then"
|
||||
EventOrderOr EventOrder = "or"
|
||||
EventOrderAnd EventOrder = "and"
|
||||
)
|
||||
|
||||
type MetricPayload struct {
|
||||
StartTimestamp int64 `json:"startTimestamp"`
|
||||
EndTimestamp int64 `json:"endTimestamp"`
|
||||
Density int `json:"density"`
|
||||
MetricOf string `json:"metricOf"`
|
||||
MetricType MetricType `json:"metricType"`
|
||||
MetricValue []string `json:"metricValue"`
|
||||
MetricFormat string `json:"metricFormat"`
|
||||
ViewType string `json:"viewType"`
|
||||
Name string `json:"name"`
|
||||
Series []Series `json:"series"`
|
||||
Limit int `json:"limit"`
|
||||
Page int `json:"page"`
|
||||
StartPoint []Filter `json:"startPoint"`
|
||||
Exclude []Filter `json:"excludes"`
|
||||
Rows uint64 `json:"rows"`
|
||||
Columns uint64 `json:"columns"`
|
||||
PreviousColumns uint64 `json:"previousColumns"`
|
||||
}
|
||||
|
||||
type MetricOfTable string
|
||||
|
||||
const (
|
||||
MetricOfTableLocation MetricOfTable = "LOCATION" // TOP Pages
|
||||
MetricOfTableBrowser MetricOfTable = "userBrowser"
|
||||
MetricOfTableReferrer MetricOfTable = "referrer"
|
||||
MetricOfTableUserId MetricOfTable = "userId"
|
||||
MetricOfTableCountry MetricOfTable = "userCountry"
|
||||
MetricOfTableDevice MetricOfTable = "userDevice"
|
||||
MetricOfTableFetch MetricOfTable = "FETCH"
|
||||
|
||||
//MetricOfTableIssues MetricOfTable = "issues"
|
||||
//MetricOfTableSessions MetricOfTable = "sessions"
|
||||
//MetricOfTableErrors MetricOfTable = "errors"
|
||||
)
|
||||
|
||||
type FilterGroup struct {
|
||||
Filters []Filter `json:"filters"`
|
||||
EventsOrder EventOrder `json:"eventsOrder"`
|
||||
}
|
||||
|
||||
type Series struct {
|
||||
Name string `json:"name"`
|
||||
Filter FilterGroup `json:"filter"`
|
||||
}
|
||||
|
||||
type Filter struct {
|
||||
Type FilterType `json:"type"`
|
||||
IsEvent bool `json:"isEvent"`
|
||||
Value []string `json:"value"`
|
||||
Operator string `json:"operator"`
|
||||
Source string `json:"source,omitempty"`
|
||||
Filters []Filter `json:"filters"`
|
||||
}
|
||||
|
||||
const (
|
||||
FilterUserId FilterType = "userId"
|
||||
FilterUserAnonymousId FilterType = "userAnonymousId"
|
||||
FilterReferrer FilterType = "referrer"
|
||||
FilterDuration FilterType = "duration"
|
||||
FilterUtmSource FilterType = "utmSource"
|
||||
FilterUtmMedium FilterType = "utmMedium"
|
||||
FilterUtmCampaign FilterType = "utmCampaign"
|
||||
FilterUserCountry FilterType = "userCountry"
|
||||
FilterUserCity FilterType = "userCity"
|
||||
FilterUserState FilterType = "userState"
|
||||
FilterUserOs FilterType = "userOs"
|
||||
FilterUserBrowser FilterType = "userBrowser"
|
||||
FilterUserDevice FilterType = "userDevice"
|
||||
FilterPlatform FilterType = "platform"
|
||||
FilterRevId FilterType = "revId"
|
||||
FilterIssue FilterType = "issue"
|
||||
FilterMetadata FilterType = "metadata"
|
||||
)
|
||||
|
||||
// Event filters
|
||||
const (
|
||||
FilterClick FilterType = "CLICK"
|
||||
FilterInput FilterType = "INPUT"
|
||||
FilterLocation FilterType = "LOCATION"
|
||||
FilterTag FilterType = "tag"
|
||||
FilterCustom FilterType = "customEvent"
|
||||
FilterFetch FilterType = "fetch"
|
||||
FilterFetchStatusCode FilterType = "fetchStatusCode" // Subfilter
|
||||
FilterGraphQLRequest FilterType = "graphql"
|
||||
FilterStateAction FilterType = "stateAction"
|
||||
FilterError FilterType = "error"
|
||||
FilterAvgCpuLoad FilterType = "avgCpuLoad"
|
||||
FilterAvgMemoryUsage FilterType = "avgMemoryUsage"
|
||||
)
|
||||
|
||||
// MOBILE FILTERS
|
||||
const (
|
||||
FilterUserOsIos FilterType = "userOsIos"
|
||||
FilterUserDeviceIos FilterType = "userDeviceIos"
|
||||
FilterUserCountryIos FilterType = "userCountryIos"
|
||||
FilterUserIdIos FilterType = "userIdIos"
|
||||
FilterUserAnonymousIdIos FilterType = "userAnonymousIdIos"
|
||||
FilterRevIdIos FilterType = "revIdIos"
|
||||
)
|
||||
|
||||
const (
|
||||
OperatorStringIs = "is"
|
||||
OperatorStringIsAny = "isAny"
|
||||
OperatorStringOn = "on"
|
||||
OperatorStringOnAny = "onAny"
|
||||
OperatorStringIsNot = "isNot"
|
||||
OperatorStringIsUndefined = "isUndefined"
|
||||
OperatorStringNotOn = "notOn"
|
||||
OperatorContains = "contains"
|
||||
OperatorStringNotContains = "notContains"
|
||||
OperatorStringStartsWith = "startsWith"
|
||||
OperatorStringEndsWith = "endsWith"
|
||||
)

import "openreplay/backend/pkg/analytics/cards"

type DataPoint struct {
	Timestamp uint64           `json:"timestamp"`
	Count     uint64           `json:"count"`
	Timestamp int64            `json:"timestamp"`
	Series    map[string]int64 `json:"series"`
}

//type TimeseriesResponse struct {
//	Data []DataPoint `json:"data"`
//}
type GetCardChartDataRequest struct {
	MetricType   string             `json:"metricType" validate:"required,oneof=timeseries table funnel"`
	MetricOf     string             `json:"metricOf" validate:"required,oneof=session_count user_count"`
	ViewType     string             `json:"viewType" validate:"required,oneof=line_chart table_view"`
	MetricFormat string             `json:"metricFormat" validate:"required,oneof=default percentage"`
	SessionID    int64              `json:"sessionId"`
	Series       []cards.CardSeries `json:"series" validate:"required,dive"`
}

type GetCardChartDataResponse struct {
	Data []DataPoint `json:"data"`
}

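// Illustration (added for readability, not part of the original change): the diff
// above interleaves the old DataPoint fields (uint64 timestamp plus count) with the
// new ones (int64 timestamp plus a per-series map). Assuming the newer shape, a
// marshalled GetCardChartDataResponse would look roughly like:
//
//	{
//	  "data": [
//	    {"timestamp": 1737043724664, "series": {"Series 1": 42}},
//	    {"timestamp": 1737043784664, "series": {"Series 1": 37}}
//	  ]
//	}
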
@ -1,497 +0,0 @@
package charts

import (
	"fmt"
	"log"
	"openreplay/backend/pkg/analytics/db"
	"strconv"
	"strings"
)

type Payload struct {
	*MetricPayload
	GroupByColumn string // TODO remove this field
	ProjectId     int
	UserId        uint64
}

type QueryBuilder interface {
	Execute(p Payload, conn db.Connector) (interface{}, error)
}

func NewQueryBuilder(p Payload) (QueryBuilder, error) {
	switch p.MetricType {
	case MetricTypeTimeseries:
		return TimeSeriesQueryBuilder{}, nil
	case MetricTypeFunnel:
		return FunnelQueryBuilder{}, nil
	case MetricTypeTable:
		if p.MetricOf == "jsException" {
			return TableErrorsQueryBuilder{}, nil
		}
		return TableQueryBuilder{}, nil
	case MetricTypeHeatmap:
		return HeatmapQueryBuilder{}, nil
	case MetricTypeSession:
		return HeatmapSessionQueryBuilder{}, nil
	case MetricUserJourney:
		return UserJourneyQueryBuilder{}, nil
	default:
		return nil, fmt.Errorf("unknown metric type: %s", p.MetricType)
	}
}

type BuildConditionsOptions struct {
	MainTableAlias       string
	PropertiesColumnName string
	DefinedColumns       map[string]string
}

var propertyKeyMap = map[string]filterConfig{
	"LOCATION":        {LogicalProperty: "url_path"},
	"FETCH":           {LogicalProperty: "url_path"},
	"REQUEST":         {LogicalProperty: "url_path"},
	"CLICK":           {LogicalProperty: "label"},
	"INPUT":           {LogicalProperty: "label"},
	"fetchUrl":        {LogicalProperty: "url_path"},
	"fetchStatusCode": {LogicalProperty: "status", IsNumeric: true},
	//"fetchDuration": {LogicalProperty: "duration", IsNumeric: true},
	//"ISSUE":         {LogicalProperty: "issue_type"},
	// TODO add more mappings as needed
}

// filterConfig holds configuration for a filter type
type filterConfig struct {
	LogicalProperty string
	IsNumeric       bool
}

func getColumnAccessor(logical string, isNumeric bool, opts BuildConditionsOptions) string {
	// helper: wrap names starting with $ in quotes
	quote := func(name string) string {
		prefix := opts.MainTableAlias + "."
		if strings.HasPrefix(name, prefix) {
			suffix := strings.TrimPrefix(name, prefix)
			if strings.HasPrefix(suffix, "$") {
				return fmt.Sprintf("%s.\"%s\"", opts.MainTableAlias, suffix)
			}
		}
		if strings.HasPrefix(name, "$") {
			return fmt.Sprintf("\"%s\"", name)
		}
		return name
	}

	// explicit column mapping
	if col, ok := opts.DefinedColumns[logical]; ok {
		col = quote(col)
		if opts.MainTableAlias != "" {
			if strings.Contains(col, ".") {
				return fmt.Sprintf("%s", col)
			}
			return fmt.Sprintf("%s.%s", opts.MainTableAlias, col)
		}
		return col
	}

	// determine property key
	propKey := logical
	if cfg, ok := propertyKeyMap[logical]; ok {
		propKey = cfg.LogicalProperty
	}

	// build properties column reference
	colName := opts.PropertiesColumnName
	if opts.MainTableAlias != "" {
		colName = fmt.Sprintf("%s.%s", opts.MainTableAlias, colName)
	}
	colName = quote(colName)

	// JSON extraction
	if isNumeric {
		return fmt.Sprintf("JSONExtractFloat(toString(%s), '%s')", colName, propKey)
	}
	return fmt.Sprintf("JSONExtractString(toString(%s), '%s')", colName, propKey)
}

// buildEventConditions builds SQL conditions and names from filters
func buildEventConditions(filters []Filter, options ...BuildConditionsOptions) (conds, names []string) {
	opts := BuildConditionsOptions{
		MainTableAlias:       "",
		PropertiesColumnName: "$properties",
		DefinedColumns:       make(map[string]string),
	}
	if len(options) > 0 {
		opt := options[0]
		if opt.MainTableAlias != "" {
			opts.MainTableAlias = opt.MainTableAlias
		}
		if opt.PropertiesColumnName != "" {
			opts.PropertiesColumnName = opt.PropertiesColumnName
		}
		if opt.DefinedColumns != nil {
			opts.DefinedColumns = opt.DefinedColumns
		}
	}
	for _, f := range filters {
		if f.Type == FilterDuration {
			continue
		}

		fConds, fNames := addFilter(f, opts)
		if len(fConds) > 0 {
			conds = append(conds, fConds...)
			names = append(names, fNames...)
		}
	}
	return
}
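
// Illustration (added for readability, not part of the original change): for a
// single CLICK filter such as
//
//	Filter{Type: "CLICK", IsEvent: true, Operator: "contains", Value: []string{"Add to cart"}}
//
// with options {MainTableAlias: "e", PropertiesColumnName: "$properties"} and no
// "label" entry in DefinedColumns (an assumption for the example), the helpers
// above produce:
//
//	conds: []string{`JSONExtractString(toString(e."$properties"), 'label') ILIKE '%Add to cart%'`}
//	names: []string{"CLICK"}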

// addFilter processes a single Filter and returns its SQL conditions and associated event names
func addFilter(f Filter, opts BuildConditionsOptions) (conds []string, names []string) {
	var ftype = string(f.Type)
	// resolve filter configuration, default if missing
	cfg, ok := propertyKeyMap[ftype]
	if !ok {
		cfg = filterConfig{LogicalProperty: ftype, IsNumeric: false}
		log.Printf("using default config for type: %v", f.Type)
	}
	acc := getColumnAccessor(cfg.LogicalProperty, cfg.IsNumeric, opts)

	// operator-based conditions
	switch f.Operator {
	case "isAny", "onAny":
		if f.IsEvent {
			names = append(names, ftype)
		}
	default:
		if c := buildCond(acc, f.Value, f.Operator, cfg.IsNumeric); c != "" {
			conds = append(conds, c)
			if f.IsEvent {
				names = append(names, ftype)
			}
		}
	}

	// nested sub-filters
	if len(f.Filters) > 0 {
		subConds, subNames := buildEventConditions(f.Filters, opts)
		if len(subConds) > 0 {
			conds = append(conds, strings.Join(subConds, " AND "))
			names = append(names, subNames...)
		}
	}

	return
}

var compOps = map[string]string{
	"equals": "=", "is": "=", "on": "=",
	"notEquals": "<>", "not": "<>", "off": "<>",
	"greaterThan": ">", "gt": ">",
	"greaterThanOrEqual": ">=", "gte": ">=",
	"lessThan": "<", "lt": "<",
	"lessThanOrEqual": "<=", "lte": "<=",
}

// buildCond constructs a condition string based on operator and values
func buildCond(expr string, values []string, operator string, isNumeric bool) string {
	if len(values) == 0 {
		return ""
	}
	switch operator {
	case "contains":
		// wrap values with % on both sides
		wrapped := make([]string, len(values))
		for i, v := range values {
			wrapped[i] = fmt.Sprintf("%%%s%%", v)
		}
		return multiValCond(expr, wrapped, "%s ILIKE %s", false)
	case "notContains":
		wrapped := make([]string, len(values))
		for i, v := range values {
			wrapped[i] = fmt.Sprintf("%%%s%%", v)
		}
		cond := multiValCond(expr, wrapped, "%s ILIKE %s", false)
		return "NOT (" + cond + ")"
	case "startsWith":
		wrapped := make([]string, len(values))
		for i, v := range values {
			wrapped[i] = v + "%"
		}
		return multiValCond(expr, wrapped, "%s ILIKE %s", false)
	case "endsWith":
		wrapped := make([]string, len(values))
		for i, v := range values {
			wrapped[i] = "%" + v
		}
		return multiValCond(expr, wrapped, "%s ILIKE %s", false)
	case "regex":
		// build match expressions
		var parts []string
		for _, v := range values {
			parts = append(parts, fmt.Sprintf("match(%s, '%s')", expr, v))
		}
		if len(parts) > 1 {
			return "(" + strings.Join(parts, " OR ") + ")"
		}
		return parts[0]
	case "in", "notIn":
		neg := operator == "notIn"
		return inClause(expr, values, neg, isNumeric)
	case ">=", ">", "<=", "<":
		return multiValCond(expr, values, "%s "+operator+" %s", isNumeric)
	default:
		if op, ok := compOps[operator]; ok {
			tmpl := "%s " + op + " %s"
			return multiValCond(expr, values, tmpl, isNumeric)
		}
		// fallback equals
		tmpl := "%s = %s"
		return multiValCond(expr, values, tmpl, isNumeric)
	}
}

// formatCondition applies a template to a single value, handling quoting
func formatCondition(expr, tmpl, value string, isNumeric bool) string {
	val := value
	if !isNumeric {
		val = fmt.Sprintf("'%s'", value)
	}
	return fmt.Sprintf(tmpl, expr, val)
}

// multiValCond applies a template to one or multiple values, using formatCondition
func multiValCond(expr string, values []string, tmpl string, isNumeric bool) string {
	if len(values) == 1 {
		return formatCondition(expr, tmpl, values[0], isNumeric)
	}
	parts := make([]string, len(values))
	for i, v := range values {
		parts[i] = formatCondition(expr, tmpl, v, isNumeric)
	}
	return "(" + strings.Join(parts, " OR ") + ")"
}

// inClause constructs IN/NOT IN clauses with proper quoting
func inClause(expr string, values []string, negate, isNumeric bool) string {
	op := "IN"
	if negate {
		op = "NOT IN"
	}

	if len(values) == 1 {
		return fmt.Sprintf("%s %s (%s)", expr, op, func() string {
			if isNumeric {
				return values[0]
			}
			return fmt.Sprintf("'%s'", values[0])
		}())
	}
	quoted := make([]string, len(values))
	for i, v := range values {
		if isNumeric {
			quoted[i] = v
		} else {
			quoted[i] = fmt.Sprintf("'%s'", v)
		}
	}
	return fmt.Sprintf("%s %s (%s)", expr, op, strings.Join(quoted, ", "))
}
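
// Illustration (added for readability, not part of the original change): a few
// outputs of the helpers above, with made-up column names and values.
//
//	buildCond("ss.duration", []string{"1000"}, ">=", true)                      // "ss.duration >= 1000"
//	multiValCond("e.user_browser", []string{"Chrome", "Firefox"}, "%s = %s", false)
//	                                           // "(e.user_browser = 'Chrome' OR e.user_browser = 'Firefox')"
//	inClause("e.status", []string{"400", "404"}, false, true)                   // "e.status IN (400, 404)"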

func buildSessionConditions(filters []Filter) []string {
	var conds []string

	return conds
}

func buildInClause(values []string) string {
	var quoted []string
	for _, v := range values {
		quoted = append(quoted, fmt.Sprintf("'%s'", v))
	}
	return strings.Join(quoted, ",")
}

func buildStaticEventWhere(p Payload) string {
	return strings.Join([]string{
		fmt.Sprintf("main.project_id = %d", p.ProjectId),
		fmt.Sprintf("main.created_at >= toDateTime(%d / 1000)", p.StartTimestamp),
		fmt.Sprintf("main.created_at <= toDateTime(%d / 1000)", p.EndTimestamp),
	}, " AND ")
}

func buildStaticSessionWhere(p Payload, sessionConds []string) (string, string) {
	static := []string{fmt.Sprintf("s.project_id = %d", p.ProjectId)}
	sessWhere := strings.Join(static, " AND ")
	if len(sessionConds) > 0 {
		sessWhere += " AND " + strings.Join(sessionConds, " AND ")
	}
	sessJoin := strings.Join(append(static, append(sessionConds,
		fmt.Sprintf("s.datetime >= toDateTime(%d / 1000)", p.StartTimestamp),
		fmt.Sprintf("s.datetime <= toDateTime(%d / 1000)", p.EndTimestamp))...), " AND ")
	return sessWhere, sessJoin
}

func buildHavingClause(conds []string) string {
	seqConds := append([]string{}, conds...)
	if len(seqConds) == 1 {
		seqConds = append(seqConds, "1")
	}
	if len(seqConds) == 0 {
		return ""
	}
	var parts []string
	for i := range seqConds {
		parts = append(parts, fmt.Sprintf("(?%d)", i+1))
	}
	pattern := strings.Join(parts, "")
	args := []string{"toDateTime(main.created_at)"}
	args = append(args, seqConds...)
	return fmt.Sprintf("HAVING sequenceMatch('%s')(%s)) AS f", pattern, strings.Join(args, ",\n    "))
}

func contains(slice []string, s string) bool {
	for _, v := range slice {
		if v == s {
			return true
		}
	}
	return false
}

func FillMissingDataPoints(
	startTime, endTime int64,
	density int,
	neutral DataPoint,
	rows []DataPoint,
	timeCoefficient int64,
) []DataPoint {
	if density <= 1 {
		return rows
	}

	stepSize := uint64(getStepSize(startTime, endTime, density, false, 1000))
	bucketSize := stepSize * uint64(timeCoefficient)

	lookup := make(map[uint64]DataPoint)
	for _, dp := range rows {
		if dp.Timestamp < uint64(startTime) {
			continue
		}
		bucket := uint64(startTime) + (((dp.Timestamp - uint64(startTime)) / bucketSize) * bucketSize)
		lookup[bucket] = dp
	}

	results := make([]DataPoint, 0, density)
	for i := 0; i < density; i++ {
		ts := uint64(startTime) + uint64(i)*bucketSize
		if dp, ok := lookup[ts]; ok {
			results = append(results, dp)
		} else {
			nd := neutral
			nd.Timestamp = ts
			results = append(results, nd)
		}
	}
	return results
}
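
// Illustration (added for readability, not part of the original change): a sketch
// of how the gap filler above might be used. getStepSize is assumed to derive the
// bucket width from (endTime - startTime) and density, so only the call shape is
// shown here.
//
//	neutral := DataPoint{}
//	full := FillMissingDataPoints(p.StartTimestamp, p.EndTimestamp, p.Density, neutral, rows, 1)
//
// Buckets present in rows keep their values; every missing bucket is emitted as a
// copy of neutral stamped with that bucket's timestamp.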
|
||||
|
||||
func partitionFilters(filters []Filter) (sessionFilters []Filter, eventFilters []Filter) {
|
||||
for _, f := range filters {
|
||||
if f.IsEvent {
|
||||
eventFilters = append(eventFilters, f)
|
||||
} else {
|
||||
sessionFilters = append(sessionFilters, f)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Returns a map: logical property -> CTE alias (e.g., "userBrowser" -> "userBrowser")
|
||||
func cteColumnAliases() map[string]string {
|
||||
aliases := make(map[string]string)
|
||||
for logical := range mainColumns {
|
||||
aliases[logical] = logical
|
||||
}
|
||||
return aliases
|
||||
}
|
||||
|
||||
// Returns a map: logical property -> source column (e.g., "userBrowser" -> "$browser")
|
||||
func cteSourceColumns() map[string]string {
|
||||
cols := make(map[string]string)
|
||||
for logical, col := range mainColumns {
|
||||
cols[logical] = col
|
||||
}
|
||||
return cols
|
||||
}
|
||||
|
||||
// Helper for reverse lookup (used for dynamic SELECT)
|
||||
func reverseLookup(m map[string]string, value string) string {
|
||||
for k, v := range m {
|
||||
if v == value {
|
||||
return k
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func eventNameCondition(table, metricOf string) string {
|
||||
if table == "" {
|
||||
table = "main"
|
||||
}
|
||||
switch metricOf {
|
||||
case string(MetricOfTableFetch):
|
||||
return fmt.Sprintf("%s.`$event_name` = 'REQUEST'", table)
|
||||
case string(MetricOfTableLocation):
|
||||
return fmt.Sprintf("%s.`$event_name` = 'LOCATION'", table)
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func buildDurationWhere(filters []Filter) ([]string, []Filter) {
|
||||
var conds []string
|
||||
var rest []Filter
|
||||
for _, f := range filters {
|
||||
if string(f.Type) == "duration" {
|
||||
v := f.Value
|
||||
if len(v) == 1 {
|
||||
if v[0] != "" {
|
||||
if d, err := strconv.ParseInt(v[0], 10, 64); err == nil {
|
||||
conds = append(conds, fmt.Sprintf("sessions.duration >= %d", d))
|
||||
}
|
||||
}
|
||||
} else if len(v) >= 2 {
|
||||
if v[0] != "" {
|
||||
if d, err := strconv.ParseInt(v[0], 10, 64); err == nil {
|
||||
conds = append(conds, fmt.Sprintf("sessions.duration >= %d", d))
|
||||
}
|
||||
}
|
||||
if v[1] != "" {
|
||||
if d, err := strconv.ParseInt(v[1], 10, 64); err == nil {
|
||||
conds = append(conds, fmt.Sprintf("sessions.duration <= %d", d))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
rest = append(rest, f)
|
||||
}
|
||||
}
|
||||
return conds, rest
|
||||
}
|
||||
|
||||
func filterOutTypes(filters []Filter, typesToRemove []FilterType) (kept []Filter, removed []Filter) {
|
||||
removeMap := make(map[FilterType]struct{}, len(typesToRemove))
|
||||
for _, t := range typesToRemove {
|
||||
removeMap[t] = struct{}{}
|
||||
}
|
||||
for _, f := range filters {
|
||||
if _, ok := removeMap[f.Type]; ok {
|
||||
removed = append(removed, f)
|
||||
} else {
|
||||
kept = append(kept, f)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
@ -1,6 +1,7 @@
package dashboards

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"

@ -335,14 +336,15 @@ func (s *dashboardsImpl) AddCards(projectId int, dashboardId int, userId uint64,
		return fmt.Errorf("failed to start transaction: %w", err)
	}

	ctx := context.Background()
	defer func() {
		if err != nil {
			err := tx.TxRollback()
			tx.Rollback(ctx)
			if err != nil {
				return
			}
		} else {
			err := tx.TxCommit()
			err := tx.Commit(ctx)
			if err != nil {
				return
			}

@ -354,7 +356,7 @@ func (s *dashboardsImpl) AddCards(projectId int, dashboardId int, userId uint64,
	for _, metricID := range req.MetricIDs {
		// Check if the widget already exists
		var exists bool
		err := tx.TxQueryRow(`
		err := tx.QueryRow(ctx, `
			SELECT EXISTS (
				SELECT 1 FROM public.dashboard_widgets
				WHERE dashboard_id = $1 AND metric_id = $2

@ -369,9 +371,10 @@ func (s *dashboardsImpl) AddCards(projectId int, dashboardId int, userId uint64,
		}

		// Insert new widget
		query := `INSERT INTO public.dashboard_widgets (dashboard_id, metric_id, user_id, config)
				  VALUES ($1, $2, $3, $4)`
		err = tx.TxExec(query, dashboardId, metricID, userId, req.Config)
		_, err = tx.Exec(ctx, `
			INSERT INTO public.dashboard_widgets (dashboard_id, metric_id, user_id, config)
			VALUES ($1, $2, $3, $4)
		`, dashboardId, metricID, userId, req.Config)
		if err != nil {
			return fmt.Errorf("failed to insert widget: %w", err)
		}

@ -379,7 +382,7 @@ func (s *dashboardsImpl) AddCards(projectId int, dashboardId int, userId uint64,
	}

	// Commit transaction
	if err := tx.TxCommit(); err != nil {
	if err := tx.Commit(ctx); err != nil {
		return fmt.Errorf("failed to commit transaction: %w", err)
	}

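// Illustration (added for readability, not part of the original change): the hunks
// above move from the custom TxRollback/TxCommit/TxExec wrappers to the stock pgx
// transaction flow. A minimal sketch, assuming a pgx pool is available:
//
//	tx, err := pool.Begin(ctx)
//	if err != nil {
//		return fmt.Errorf("failed to start transaction: %w", err)
//	}
//	defer tx.Rollback(ctx) // harmless once Commit has succeeded
//
//	if _, err := tx.Exec(ctx, `INSERT ...`, args...); err != nil {
//		return err
//	}
//	return tx.Commit(ctx)
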
@ -1,86 +0,0 @@
package db

import (
	"context"
	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
	"openreplay/backend/internal/config/common"
	"time"
)

type TableValue struct {
	Name  string `json:"name"`
	Total uint64 `json:"total"`
}

type TableResponse struct {
	Total  uint64       `json:"total"`
	Count  uint64       `json:"count"`
	Values []TableValue `json:"values"`
}

type Connector interface {
	Stop() error
	Query(query string) (driver.Rows, error)
	QueryRow(query string) (driver.Row, error)
	QueryArgs(query string, args map[string]interface{}) (driver.Rows, error)
}

type connectorImpl struct {
	conn driver.Conn
}

func NewConnector(cfg common.Clickhouse) (Connector, error) {
	conn, err := clickhouse.Open(&clickhouse.Options{
		Addr: []string{cfg.GetTrimmedURL()},
		Auth: clickhouse.Auth{
			Database: cfg.Database,
			Username: cfg.LegacyUserName,
			Password: cfg.LegacyPassword,
		},
		MaxOpenConns:    20,
		MaxIdleConns:    15,
		ConnMaxLifetime: 3 * time.Minute,
		Compression: &clickhouse.Compression{
			Method: clickhouse.CompressionLZ4,
		},
	})
	if err != nil {
		return nil, err
	}
	return &connectorImpl{conn: conn}, nil
}

func (c *connectorImpl) Stop() error {
	return c.conn.Close()
}

func (c *connectorImpl) Query(query string) (driver.Rows, error) {
	rows, err := c.conn.Query(context.Background(), query)
	if err != nil {
		return nil, err
	}
	//defer rows.Close()

	return rows, nil
}

func (c *connectorImpl) QueryRow(query string) (driver.Row, error) {
	row := c.conn.QueryRow(context.Background(), query)
	if err := row.Err(); err != nil {
		return nil, err
	}
	//defer row.Close()

	return row, nil
}

func (c *connectorImpl) QueryArgs(query string, args map[string]interface{}) (driver.Rows, error) {
	rows, err := c.conn.Query(context.Background(), query, args)
	if err != nil {
		return nil, err
	}
	//defer rows.Close()

	return rows, nil
}

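// Illustration (added for readability, not part of the original change): a minimal
// sketch of how this connector was used before its removal; cfg is assumed to be a
// populated common.Clickhouse config and the query text is a placeholder.
//
//	conn, err := NewConnector(cfg)
//	if err != nil {
//		return err
//	}
//	defer conn.Stop()
//
//	rows, err := conn.Query("SELECT name, total FROM ...")
//	if err != nil {
//		return err
//	}
//	defer rows.Close()
//	for rows.Next() {
//		var v TableValue
//		if err := rows.Scan(&v.Name, &v.Total); err != nil {
//			return err
//		}
//	}
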
@ -1,654 +0,0 @@
|
|||
package query
|
||||
|
||||
//package main
|
||||
//
|
||||
//import (
|
||||
// "fmt"
|
||||
// "strings"
|
||||
//)
|
||||
//
|
||||
////func main() {
|
||||
//// var r Root
|
||||
//// //err := json.Unmarshal([]byte(jsonInput), &r)
|
||||
//// //if err != nil {
|
||||
//// // panic(err)
|
||||
//// //}
|
||||
//// //
|
||||
//// ////fmt.Println("ARGS:", r)
|
||||
//// //fmt.Println(buildQuery(r))
|
||||
//// ////fmt.Println("QUERY PART:", qp)
|
||||
////
|
||||
//// builder := NewQueryBuilder()
|
||||
//// query, err := builder.BuildQuery(r)
|
||||
//// if err != nil {
|
||||
//// fmt.Println("ERROR:", err)
|
||||
//// }
|
||||
////
|
||||
//// fmt.Println(query)
|
||||
////}
|
||||
//
|
||||
//type Table string
|
||||
//type Column string
|
||||
//type FilterType string
|
||||
//type EventOrder string
|
||||
//type FetchFilterType string
|
||||
//
|
||||
//const (
|
||||
// UserOs FilterType = "userOs"
|
||||
// UserBrowser FilterType = "userBrowser"
|
||||
// UserDevice FilterType = "userDevice"
|
||||
// UserCountry FilterType = "userCountry"
|
||||
// UserCity FilterType = "userCity"
|
||||
// UserState FilterType = "userState"
|
||||
// UserId FilterType = "userId"
|
||||
// UserAnonymousId FilterType = "userAnonymousId"
|
||||
// Referrer FilterType = "referrer"
|
||||
// RevId FilterType = "revId"
|
||||
// UserOsIos FilterType = "userOsIos"
|
||||
// UserDeviceIos FilterType = "userDeviceIos"
|
||||
// UserCountryIos FilterType = "userCountryIos"
|
||||
// UserIdIos FilterType = "userIdIos"
|
||||
// UserAnonymousIdIos FilterType = "userAnonymousIdIos"
|
||||
// RevIdIos FilterType = "revIdIos"
|
||||
// Duration FilterType = "duration"
|
||||
// Platform FilterType = "platform"
|
||||
// Metadata FilterType = "metadata"
|
||||
// Issue FilterType = "issue"
|
||||
// EventsCount FilterType = "eventsCount"
|
||||
// UtmSource FilterType = "utmSource"
|
||||
// UtmMedium FilterType = "utmMedium"
|
||||
// UtmCampaign FilterType = "utmCampaign"
|
||||
// ThermalState FilterType = "thermalState"
|
||||
// MainThreadCPU FilterType = "mainThreadCPU"
|
||||
// ViewComponent FilterType = "viewComponent"
|
||||
// LogEvent FilterType = "logEvent"
|
||||
// ClickEvent FilterType = "clickEvent"
|
||||
// MemoryUsage FilterType = "memoryUsage"
|
||||
//)
|
||||
//
|
||||
//const (
|
||||
// Click FilterType = "click"
|
||||
// Input FilterType = "input"
|
||||
// Location FilterType = "location"
|
||||
// Custom FilterType = "custom"
|
||||
// Request FilterType = "request"
|
||||
// Fetch FilterType = "fetch"
|
||||
// GraphQL FilterType = "graphql"
|
||||
// StateAction FilterType = "stateAction"
|
||||
// Error FilterType = "error"
|
||||
// Tag FilterType = "tag"
|
||||
// ClickMobile FilterType = "clickMobile"
|
||||
// InputMobile FilterType = "inputMobile"
|
||||
// ViewMobile FilterType = "viewMobile"
|
||||
// CustomMobile FilterType = "customMobile"
|
||||
// RequestMobile FilterType = "requestMobile"
|
||||
// ErrorMobile FilterType = "errorMobile"
|
||||
// SwipeMobile FilterType = "swipeMobile"
|
||||
//)
|
||||
//
|
||||
//const (
|
||||
// EventOrderThen EventOrder = "then"
|
||||
// EventOrderOr EventOrder = "or"
|
||||
// EventOrderAnd EventOrder = "and"
|
||||
//)
|
||||
//
|
||||
//const (
|
||||
// FetchFilterTypeFetchUrl FilterType = "fetchUrl"
|
||||
// FetchFilterTypeFetchStatusCode FilterType = "fetchStatusCode"
|
||||
// FetchFilterTypeFetchMethod FilterType = "fetchMethod"
|
||||
// FetchFilterTypeFetchDuration FilterType = "fetchDuration"
|
||||
// FetchFilterTypeFetchRequestBody FilterType = "fetchRequestBody"
|
||||
// FetchFilterTypeFetchResponseBody FilterType = "fetchResponseBody"
|
||||
//)
|
||||
//
|
||||
//const (
|
||||
// OperatorStringIs = "is"
|
||||
// OperatorStringIsAny = "isAny"
|
||||
// OperatorStringOn = "on"
|
||||
// OperatorStringOnAny = "onAny"
|
||||
// OperatorStringIsNot = "isNot"
|
||||
// OperatorStringIsUndefined = "isUndefined"
|
||||
// OperatorStringNotOn = "notOn"
|
||||
// OperatorStringContains = "contains"
|
||||
// OperatorStringNotContains = "notContains"
|
||||
// OperatorStringStartsWith = "startsWith"
|
||||
// OperatorStringEndsWith = "endsWith"
|
||||
//)
|
||||
//
|
||||
//const (
|
||||
// OperatorMathEq = "="
|
||||
// OperatorMathLt = "<"
|
||||
// OperatorMathGt = ">"
|
||||
// OperatorMathLe = "<="
|
||||
// OperatorMathGe = ">="
|
||||
//)
|
||||
//
|
||||
////--------------------------------------------------
|
||||
//// Constants for columns, tables, etc.
|
||||
////--------------------------------------------------
|
||||
//
|
||||
//const (
|
||||
// TableEvents Table = "product_analytics.events"
|
||||
// TableSessions Table = "experimental.sessions"
|
||||
//
|
||||
// ColEventTime Column = "main.created_at"
|
||||
// ColEventName Column = "main.`$event_name`"
|
||||
// ColEventProjectID Column = "main.project_id"
|
||||
// ColEventProperties Column = "main.`$properties`"
|
||||
// ColEventSessionID Column = "main.session_id"
|
||||
// ColEventURLPath Column = "main.url_path"
|
||||
// ColEventStatus Column = "main.status"
|
||||
//
|
||||
// ColSessionID Column = "s.session_id"
|
||||
// ColDuration Column = "s.duration"
|
||||
// ColUserCountry Column = "s.user_country"
|
||||
// ColUserCity Column = "s.user_city"
|
||||
// ColUserState Column = "s.user_state"
|
||||
// ColUserID Column = "s.user_id"
|
||||
// ColUserAnonymousID Column = "s.user_anonymous_id"
|
||||
// ColUserOS Column = "s.user_os"
|
||||
// ColUserBrowser Column = "s.user_browser"
|
||||
// ColUserDevice Column = "s.user_device"
|
||||
// ColUserDeviceType Column = "s.user_device_type"
|
||||
// ColRevID Column = "s.rev_id"
|
||||
// ColBaseReferrer Column = "s.base_referrer"
|
||||
// ColUtmSource Column = "s.utm_source"
|
||||
// ColUtmMedium Column = "s.utm_medium"
|
||||
// ColUtmCampaign Column = "s.utm_campaign"
|
||||
// ColMetadata1 Column = "s.metadata_1"
|
||||
// ColSessionProjectID Column = "s.project_id"
|
||||
// ColSessionIsNotNull Column = "isNotNull(s.duration)"
|
||||
//)
|
||||
//
|
||||
//type Root struct {
|
||||
// StartTimestamp int64 `json:"startTimestamp"`
|
||||
// EndTimestamp int64 `json:"endTimestamp"`
|
||||
// Series []Series `json:"series"`
|
||||
// ProjectID int64 `json:"projectId"`
|
||||
//}
|
||||
//
|
||||
//type Series struct {
|
||||
// SeriesID int64 `json:"seriesId"`
|
||||
// Name string `json:"name"`
|
||||
// Filter SeriesFilter `json:"filter"`
|
||||
//}
|
||||
//
|
||||
//type SeriesFilter struct {
|
||||
// Filters []FilterObj `json:"filters"`
|
||||
// EventsOrder EventOrder `json:"eventsOrder"`
|
||||
//}
|
||||
//
|
||||
//type FilterObj struct {
|
||||
// Key string `json:"key"`
|
||||
// Type FilterType `json:"type"`
|
||||
// IsEvent bool `json:"isEvent"`
|
||||
// Value []string `json:"value"`
|
||||
// Operator string `json:"operator"`
|
||||
// Source string `json:"source"`
|
||||
// Filters []FilterObj `json:"filters"`
|
||||
//}
|
||||
//
|
||||
//// --------------------------------------------------
|
||||
//func buildQuery(r Root) string {
|
||||
// s := r.Series[0]
|
||||
//
|
||||
// // iterate over series and partition filters
|
||||
// //for _, s := range r.Series {
|
||||
// // sessionFilters, eventFilters := partitionFilters(s.Filter.Filters)
|
||||
// // sessionWhere := buildSessionWhere(sessionFilters)
|
||||
// // eventWhere, seqHaving := buildEventsWhere(eventFilters, s.Filter.EventsOrder)
|
||||
// // fmt.Println("SESSION FILTERS:", sessionFilters)
|
||||
// // fmt.Println("EVENT FILTERS:", eventFilters)
|
||||
// // fmt.Println("SESSION WHERE:", sessionWhere)
|
||||
// // fmt.Println("EVENT WHERE:", eventWhere)
|
||||
// // fmt.Println("SEQ HAVING:", seqHaving)
|
||||
// //}
|
||||
//
|
||||
// sessionFilters, eventFilters := partitionFilters(s.Filter.Filters)
|
||||
// sessionWhere := buildSessionWhere(sessionFilters)
|
||||
// eventWhere, seqHaving := buildEventsWhere(eventFilters, s.Filter.EventsOrder)
|
||||
//
|
||||
// subQuery := fmt.Sprintf(
|
||||
// "SELECT %s,\n"+
|
||||
// " MIN(%s) AS first_event_ts,\n"+
|
||||
// " MAX(%s) AS last_event_ts\n"+
|
||||
// "FROM %s AS main\n"+
|
||||
// "WHERE main.project_id = %%(project_id)s\n"+
|
||||
// " AND %s >= toDateTime(%%(start_time)s/1000)\n"+
|
||||
// " AND %s <= toDateTime(%%(end_time)s/1000)\n"+
|
||||
// " AND (%s)\n"+
|
||||
// "GROUP BY %s\n"+
|
||||
// "HAVING %s",
|
||||
// ColEventSessionID,
|
||||
// ColEventTime,
|
||||
// ColEventTime,
|
||||
// TableEvents,
|
||||
// ColEventTime,
|
||||
// ColEventTime,
|
||||
// strings.Join(eventWhere, " OR "),
|
||||
// ColEventSessionID,
|
||||
// seqHaving,
|
||||
// )
|
||||
//
|
||||
// joinQuery := fmt.Sprintf(
|
||||
// "SELECT *\n"+
|
||||
// "FROM %s AS s\n"+
|
||||
// "INNER JOIN (\n"+
|
||||
// " SELECT DISTINCT ev.session_id, ev.`$current_url` AS url_path\n"+
|
||||
// " FROM %s AS ev\n"+
|
||||
// " WHERE ev.created_at >= toDateTime(%%(start_time)s/1000)\n"+
|
||||
// " AND ev.created_at <= toDateTime(%%(end_time)s/1000)\n"+
|
||||
// " AND ev.project_id = %%(project_id)s\n"+
|
||||
// " AND ev.`$event_name` = 'LOCATION'\n"+
|
||||
// ") AS extra_event USING (session_id)\n"+
|
||||
// "WHERE s.project_id = %%(project_id)s\n"+
|
||||
// " AND %s\n"+
|
||||
// " AND s.datetime >= toDateTime(%%(start_time)s/1000)\n"+
|
||||
// " AND s.datetime <= toDateTime(%%(end_time)s/1000)\n",
|
||||
// TableSessions,
|
||||
// TableEvents,
|
||||
// ColSessionIsNotNull,
|
||||
// )
|
||||
//
|
||||
// if len(sessionWhere) > 0 {
|
||||
// joinQuery += " AND " + strings.Join(sessionWhere, " AND ") + "\n"
|
||||
// }
|
||||
//
|
||||
// main := fmt.Sprintf(
|
||||
// "SELECT s.session_id AS session_id, s.url_path\n"+
|
||||
// "FROM (\n%s\n) AS f\n"+
|
||||
// "INNER JOIN (\n%s) AS s\n"+
|
||||
// " ON (s.session_id = f.session_id)\n",
|
||||
// subQuery,
|
||||
// joinQuery,
|
||||
// )
|
||||
//
|
||||
// final := fmt.Sprintf(
|
||||
// "SELECT COUNT(DISTINCT url_path) OVER () AS main_count,\n"+
|
||||
// " url_path AS name,\n"+
|
||||
// " COUNT(DISTINCT session_id) AS total,\n"+
|
||||
// " COALESCE(SUM(COUNT(DISTINCT session_id)) OVER (), 0) AS total_count\n"+
|
||||
// "FROM (\n%s) AS filtered_sessions\n"+
|
||||
// "GROUP BY url_path\n"+
|
||||
// "ORDER BY total DESC\n"+
|
||||
// "LIMIT 200 OFFSET 0;",
|
||||
// main,
|
||||
// )
|
||||
//
|
||||
// return final
|
||||
//}
|
||||
//
|
||||
//func partitionFilters(filters []FilterObj) (sessionFilters, eventFilters []FilterObj) {
|
||||
// for _, f := range filters {
|
||||
// if f.IsEvent {
|
||||
// eventFilters = append(eventFilters, f)
|
||||
// } else {
|
||||
// sessionFilters = append(sessionFilters, f)
|
||||
// }
|
||||
// }
|
||||
// return
|
||||
//}
|
||||
//
|
||||
//func buildSessionWhere(filters []FilterObj) []string {
|
||||
// var conds []string
|
||||
// for _, f := range filters {
|
||||
// switch f.Type {
|
||||
// case UserCountry:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCountry, concatValues(f.Value)))
|
||||
// case UserCity:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCity, concatValues(f.Value)))
|
||||
// case UserState:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserState, concatValues(f.Value)))
|
||||
// case UserId:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserID, concatValues(f.Value)))
|
||||
// case UserAnonymousId:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserAnonymousID, concatValues(f.Value)))
|
||||
// case UserOs:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserOS, concatValues(f.Value)))
|
||||
// case UserBrowser:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserBrowser, concatValues(f.Value)))
|
||||
// case UserDevice:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDevice, concatValues(f.Value)))
|
||||
// case Platform:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDeviceType, concatValues(f.Value)))
|
||||
// case RevId:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColRevID, concatValues(f.Value)))
|
||||
// case Referrer:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColBaseReferrer, concatValues(f.Value)))
|
||||
// case Duration:
|
||||
// if len(f.Value) == 2 {
|
||||
// conds = append(conds, fmt.Sprintf("%s >= '%s'", ColDuration, f.Value[0]))
|
||||
// conds = append(conds, fmt.Sprintf("%s <= '%s'", ColDuration, f.Value[1]))
|
||||
// }
|
||||
// case UtmSource:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmSource, concatValues(f.Value)))
|
||||
// case UtmMedium:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmMedium, concatValues(f.Value)))
|
||||
// case UtmCampaign:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmCampaign, concatValues(f.Value)))
|
||||
// case Metadata:
|
||||
// conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColMetadata1, concatValues(f.Value)))
|
||||
// }
|
||||
// }
|
||||
// // add /n to each condition
|
||||
// for i := range conds {
|
||||
// conds[i] += "\n"
|
||||
// }
|
||||
// return conds
|
||||
//}
|
||||
//
|
||||
//func parseOperator(op string) string {
|
||||
// switch strings.ToLower(op) {
|
||||
// case OperatorStringContains:
|
||||
// return OperatorMathEq // interpret as "LIKE" if needed
|
||||
// case OperatorStringIs, OperatorStringOn, "=", OperatorStringOnAny:
|
||||
// return OperatorMathEq
|
||||
// case OperatorStringStartsWith:
|
||||
// // might interpret differently in real impl
|
||||
// return OperatorMathEq
|
||||
// case OperatorStringEndsWith:
|
||||
// // might interpret differently in real impl
|
||||
// return OperatorMathEq
|
||||
// default:
|
||||
// return OperatorMathEq
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//func buildEventsWhere(filters []FilterObj, order EventOrder) (eventConditions []string, having string) {
|
||||
// basicEventTypes := "(" +
|
||||
// strings.Join([]string{
|
||||
// fmt.Sprintf("%s = 'CLICK'", ColEventName),
|
||||
// fmt.Sprintf("%s = 'INPUT'", ColEventName),
|
||||
// fmt.Sprintf("%s = 'LOCATION'", ColEventName),
|
||||
// fmt.Sprintf("%s = 'CUSTOM'", ColEventName),
|
||||
// fmt.Sprintf("%s = 'REQUEST'", ColEventName),
|
||||
// }, " OR ") + ")"
|
||||
//
|
||||
// var seq []string
|
||||
// for _, f := range filters {
|
||||
// switch f.Type {
|
||||
// case Click:
|
||||
// seq = append(seq, seqCond("CLICK", "selector", f))
|
||||
// case Input:
|
||||
// seq = append(seq, seqCond("INPUT", "label", f))
|
||||
// case Location:
|
||||
// seq = append(seq, seqCond("LOCATION", "url_path", f))
|
||||
// case Custom:
|
||||
// seq = append(seq, seqCond("CUSTOM", "name", f))
|
||||
// case Fetch:
|
||||
// seq = append(seq, seqFetchCond("REQUEST", f))
|
||||
// case FetchFilterTypeFetchStatusCode:
|
||||
// seq = append(seq, seqCond("REQUEST", "status", f))
|
||||
// default:
|
||||
// seq = append(seq, fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(string(f.Type))))
|
||||
// }
|
||||
// }
|
||||
// eventConditions = []string{basicEventTypes}
|
||||
//
|
||||
// // then => sequenceMatch
|
||||
// // or => OR
|
||||
// // and => AND
|
||||
// switch order {
|
||||
// case EventOrderThen:
|
||||
// var pattern []string
|
||||
// for i := range seq {
|
||||
// pattern = append(pattern, fmt.Sprintf("(?%d)", i+1))
|
||||
// }
|
||||
// having = fmt.Sprintf("sequenceMatch('%s')(\n%s,\n%s)",
|
||||
// strings.Join(pattern, ""), fmt.Sprintf("toUnixTimestamp(%s)", ColEventTime), strings.Join(seq, ",\n"))
|
||||
// case EventOrderAnd:
|
||||
// // build AND
|
||||
// having = strings.Join(seq, " AND ")
|
||||
// default:
|
||||
// // default => OR
|
||||
// var orParts []string
|
||||
// for _, p := range seq {
|
||||
// orParts = append(orParts, "("+p+")")
|
||||
// }
|
||||
// having = strings.Join(orParts, " OR ")
|
||||
// }
|
||||
// return
|
||||
//}
|
||||
//
|
||||
//func seqCond(eventName, key string, f FilterObj) string {
|
||||
// op := parseOperator(f.Operator)
|
||||
// return fmt.Sprintf("(%s = '%s' AND JSONExtractString(toString(%s), '%s') %s '%s')",
|
||||
// ColEventName, strings.ToUpper(eventName), ColEventProperties, key, op, concatValues(f.Value))
|
||||
//}
|
||||
//
|
||||
//func seqFetchCond(eventName string, f FilterObj) string {
|
||||
// w := []string{fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(eventName))}
|
||||
// var extras []string
|
||||
// for _, c := range f.Filters {
|
||||
// switch c.Type {
|
||||
// case Fetch:
|
||||
// if len(c.Value) > 0 {
|
||||
// extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventURLPath, concatValues(c.Value)))
|
||||
// }
|
||||
// case FetchFilterTypeFetchStatusCode:
|
||||
// if len(c.Value) > 0 {
|
||||
// extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventStatus, concatValues(c.Value)))
|
||||
// }
|
||||
// default:
|
||||
// // placeholder if needed
// }
// }
// if len(extras) > 0 {
//     w = append(w, strings.Join(extras, " AND "))
// }
// return "(" + strings.Join(w, " AND ") + ")"
//}
//
//func concatValues(v []string) string {
//    return strings.Join(v, "")
//}

//const jsonInput = `
//{
//  "startTimestamp": 1737043724664,
//  "endTimestamp": 1737130124664,
//  "projectId": 1,
//  "series": [
//    {
//      "seriesId": 610,
//      "name": "Series 1",
//      "filter": {
//        "filters": [
//          { "type": "click", "isEvent": true, "value": ["DEPLOYMENT"], "operator": "on", "filters": [] },
//          { "type": "input", "isEvent": true, "value": ["a"], "operator": "contains", "filters": [] },
//          { "type": "location", "isEvent": true, "value": ["/en/using-or/"], "operator": "is", "filters": [] },
//          { "type": "userCountry", "isEvent": false, "value": ["AD"], "operator": "is", "filters": [] },
//          { "type": "userCity", "isEvent": false, "value": ["Mumbai"], "operator": "is", "filters": [] },
//          { "type": "userState", "isEvent": false, "value": ["Maharashtra"], "operator": "is", "filters": [] },
//          { "type": "userId", "isEvent": false, "value": ["test@test.com"], "operator": "is", "filters": [] },
//          { "type": "userAnonymousId", "isEvent": false, "value": ["asd"], "operator": "is", "filters": [] },
//          { "type": "userOs", "isEvent": false, "value": ["Mac OS X"], "operator": "is", "filters": [] },
//          { "type": "userBrowser", "isEvent": false, "value": ["Chrome"], "operator": "is", "filters": [] },
//          { "type": "userDevice", "isEvent": false, "value": ["iPhone"], "operator": "is", "filters": [] },
//          { "type": "platform", "isEvent": false, "value": ["desktop"], "operator": "is", "filters": [] },
//          { "type": "revId", "isEvent": false, "value": ["v1"], "operator": "is", "filters": [] },
//          { "type": "referrer", "isEvent": false, "value": ["https://www.google.com/"], "operator": "is", "filters": [] },
//          { "type": "duration", "isEvent": false, "value": ["60000", "6000000"], "operator": "is", "filters": [] },
//          { "type": "utmSource", "isEvent": false, "value": ["aaa"], "operator": "is", "filters": [] },
//          { "type": "utmMedium", "isEvent": false, "value": ["aa"], "operator": "is", "filters": [] },
//          { "type": "utmCampaign", "isEvent": false, "value": ["aaa"], "operator": "is", "filters": [] },
//          { "type": "metadata", "isEvent": false, "value": ["bbbb"], "operator": "is", "source": "userId", "filters": [] },
//          { "type": "custom", "isEvent": true, "value": ["test"], "operator": "is", "filters": [] },
//          {
//            "type": "fetch", "isEvent": true, "value": [], "operator": "is",
//            "filters": [
//              { "type": "fetchUrl", "isEvent": false, "value": ["/ai/docs/chat"], "operator": "is", "filters": [] },
//              { "type": "fetchStatusCode", "isEvent": false, "value": ["400"], "operator": "=", "filters": [] },
//              { "type": "fetchMethod", "isEvent": false, "value": [], "operator": "is", "filters": [] },
//              { "type": "fetchDuration", "isEvent": false, "value": [], "operator": "=", "filters": [] },
//              { "type": "fetchRequestBody", "isEvent": false, "value": [], "operator": "is", "filters": [] },
//              { "type": "fetchResponseBody", "isEvent": false, "value": [], "operator": "is", "filters": [] }
//            ]
//          }
//        ],
//        "eventsOrder": "then"
//      }
//    }
//  ]
//}
//`
@ -1,7 +0,0 @@
package query

type FunnelQueryBuilder struct{}

func (f FunnelQueryBuilder) Build(p MetricPayload) string {
    return "-- Funnel query placeholder"
}
@ -1,137 +0,0 @@
package query

type Table string
type Column string
type MetricType string
type FilterType string
type EventOrder string

const (
    TableEvents   Table = "product_analytics.events"
    TableSessions Table = "experimental.sessions"
)

const (
    ColEventTime       Column = "main.created_at"
    ColEventName       Column = "main.`$event_name`"
    ColEventProjectID  Column = "main.project_id"
    ColEventProperties Column = "main.`$properties`"
    ColEventSessionID  Column = "main.session_id"
    ColEventURLPath    Column = "main.url_path"
    ColEventStatus     Column = "main.status"
)

const (
    ColSessionID        Column = "s.session_id"
    ColDuration         Column = "s.duration"
    ColUserCountry      Column = "s.user_country"
    ColUserCity         Column = "s.user_city"
    ColUserState        Column = "s.user_state"
    ColUserID           Column = "s.user_id"
    ColUserAnonymousID  Column = "s.user_anonymous_id"
    ColUserOS           Column = "s.user_os"
    ColUserBrowser      Column = "s.user_browser"
    ColUserDevice       Column = "s.user_device"
    ColUserDeviceType   Column = "s.user_device_type"
    ColRevID            Column = "s.rev_id"
    ColBaseReferrer     Column = "s.base_referrer"
    ColUtmSource        Column = "s.utm_source"
    ColUtmMedium        Column = "s.utm_medium"
    ColUtmCampaign      Column = "s.utm_campaign"
    ColMetadata1        Column = "s.metadata_1"
    ColSessionProjectID Column = "s.project_id"
    ColSessionIsNotNull Column = "isNotNull(s.duration)"
)

const (
    MetricTypeTimeseries MetricType = "timeseries"
    MetricTypeTable      MetricType = "table"
    MetricTypeFunnel     MetricType = "funnel"
)

const (
    EventOrderThen EventOrder = "then"
    EventOrderOr   EventOrder = "or"
    EventOrderAnd  EventOrder = "and"
)

type MetricPayload struct {
    StartTimestamp int64      `json:"startTimestamp"`
    EndTimestamp   int64      `json:"endTimestamp"`
    Density        int        `json:"density"`
    MetricOf       string     `json:"metricOf"`
    MetricType     MetricType `json:"metricType"`
    MetricFormat   string     `json:"metricFormat"`
    ViewType       string     `json:"viewType"`
    Name           string     `json:"name"`
    Series         []Series   `json:"series"`
    CompareTo      *string    `json:"compareTo"`
}

type Series struct {
    Name   string `json:"name"`
    Filter struct {
        Filters     []Filter   `json:"filters"`
        EventsOrder EventOrder `json:"eventsOrder"`
    } `json:"filter"`
}

type Filter struct {
    Type     FilterType `json:"type"`
    IsEvent  bool       `json:"isEvent"`
    Value    []string   `json:"value"`
    Operator string     `json:"operator"`
    Filters  []Filter   `json:"filters"`
}

const (
    FilterUserOs             FilterType = "userOs"
    FilterUserBrowser        FilterType = "userBrowser"
    FilterUserDevice         FilterType = "userDevice"
    FilterUserCountry        FilterType = "userCountry"
    FilterUserCity           FilterType = "userCity"
    FilterUserState          FilterType = "userState"
    FilterUserId             FilterType = "userId"
    FilterUserAnonymousId    FilterType = "userAnonymousId"
    FilterReferrer           FilterType = "referrer"
    FilterRevId              FilterType = "revId"
    FilterUserOsIos          FilterType = "userOsIos"
    FilterUserDeviceIos      FilterType = "userDeviceIos"
    FilterUserCountryIos     FilterType = "userCountryIos"
    FilterUserIdIos          FilterType = "userIdIos"
    FilterUserAnonymousIdIos FilterType = "userAnonymousIdIos"
    FilterRevIdIos           FilterType = "revIdIos"
    FilterDuration           FilterType = "duration"
    FilterPlatform           FilterType = "platform"
    FilterMetadata           FilterType = "metadata"
    FilterIssue              FilterType = "issue"
    FilterEventsCount        FilterType = "eventsCount"
    FilterUtmSource          FilterType = "utmSource"
    FilterUtmMedium          FilterType = "utmMedium"
    FilterUtmCampaign        FilterType = "utmCampaign"
    FilterThermalState       FilterType = "thermalState"
    FilterMainThreadCPU      FilterType = "mainThreadCPU"
    FilterViewComponent      FilterType = "viewComponent"
    FilterLogEvent           FilterType = "logEvent"
    FilterMemoryUsage        FilterType = "memoryUsage"
    FilterClick              FilterType = "click"
    FilterInput              FilterType = "input"
    FilterLocation           FilterType = "location"
    FilterCustom             FilterType = "customEvent"
    FilterFetch              FilterType = "fetch"
    FilterFetchStatusCode    FilterType = "status"
)

const (
    OperatorStringIs          = "is"
    OperatorStringIsAny       = "isAny"
    OperatorStringOn          = "on"
    OperatorStringOnAny       = "onAny"
    OperatorStringIsNot       = "isNot"
    OperatorStringIsUndefined = "isUndefined"
    OperatorStringNotOn       = "notOn"
    OperatorStringContains    = "contains"
    OperatorStringNotContains = "notContains"
    OperatorStringStartsWith  = "startsWith"
    OperatorStringEndsWith    = "endsWith"
)
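A minimal sketch of how a payload like the jsonInput fixture above lands in these structs. It is not part of the diff; the function name is invented and it assumes encoding/json and fmt are imported alongside this package's types.

func decodeExample() {
    raw := `{"metricType":"timeseries","series":[{"name":"Series 1","filter":{"eventsOrder":"then",
        "filters":[{"type":"click","isEvent":true,"value":["DEPLOYMENT"],"operator":"on","filters":[]}]}}]}`

    var p MetricPayload
    if err := json.Unmarshal([]byte(raw), &p); err != nil {
        panic(err)
    }
    // Nested access mirrors the JSON shape: series -> filter -> filters.
    first := p.Series[0].Filter.Filters[0]
    fmt.Println(p.MetricType, first.Type, first.Operator) // timeseries click on
}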
@ -1,253 +0,0 @@
package query

import (
    "encoding/json"
    "fmt"
    "strings"
)

type NewQueryBuilder interface {
    Build(MetricPayload) string
}

func buildEventSubquery(p MetricPayload) string {
    baseEventsWhere := buildBaseEventsWhere(p)
    sequenceCond := buildSequenceCondition(p.Series)
    sessionsWhere := buildSessionsWhere(p)

    // If there's no sequence pattern, skip HAVING entirely.
    if sequenceCond.seqPattern == "" {
        return fmt.Sprintf(`
SELECT s.%[1]s AS %[1]s,
       s.datetime AS datetime
FROM (
    SELECT main.session_id,
           MIN(main.created_at) AS first_event_ts,
           MAX(main.created_at) AS last_event_ts
    FROM product_analytics.events AS main
    WHERE %[2]s
    GROUP BY session_id
) AS f
INNER JOIN (
    SELECT *
    FROM experimental.sessions AS s
    WHERE %[3]s
) AS s ON (s.session_id = f.session_id)
`, pickIDField(p), baseEventsWhere, sessionsWhere)
    }

    return fmt.Sprintf(`
SELECT s.%[1]s AS %[1]s,
       s.datetime AS datetime
FROM (
    SELECT main.session_id,
           MIN(main.created_at) AS first_event_ts,
           MAX(main.created_at) AS last_event_ts
    FROM product_analytics.events AS main
    WHERE %[2]s
    GROUP BY session_id
    HAVING sequenceMatch('%[3]s')(toDateTime(main.created_at), %[4]s)
) AS f
INNER JOIN (
    SELECT *
    FROM experimental.sessions AS s
    WHERE %[5]s
) AS s ON (s.session_id = f.session_id)
`, pickIDField(p), baseEventsWhere, sequenceCond.seqPattern, sequenceCond.seqEvents, sessionsWhere)
}

func pickIDField(p MetricPayload) string {
    if p.MetricOf == "userCount" {
        return "user_id"
    }
    return "session_id"
}

func buildBaseEventsWhere(p MetricPayload) string {
    projectID := 5
    ts := fmt.Sprintf(
        `(main.created_at >= toDateTime(%d / 1000) AND main.created_at <= toDateTime(%d / 1000))`,
        p.StartTimestamp,
        p.EndTimestamp,
    )
    return fmt.Sprintf(`main.project_id = %d AND %s`, projectID, ts)
}

func buildSessionsWhere(p MetricPayload) string {
    projectID := 5
    ts := fmt.Sprintf(
        `(s.datetime >= toDateTime(%d / 1000) AND s.datetime <= toDateTime(%d / 1000))`,
        p.StartTimestamp,
        p.EndTimestamp,
    )
    return fmt.Sprintf(`s.project_id = %d AND isNotNull(s.duration) AND %s`, projectID, ts)
}

type sequenceParts struct {
    seqPattern string
    seqEvents  string
}

func buildSequenceCondition(series []Series) sequenceParts {
    var events []string
    for _, s := range series {
        if len(s.Filter.Filters) > 0 {
            events = append(events, buildOneSeriesSequence(s.Filter.Filters))
        }
    }

    if len(events) == 0 {
        return sequenceParts{"", ""}
    }

    // For n events, we need a pattern like `(?1)(?2)(?3)...(?n)`.
    pattern := ""
    for i := 1; i <= len(events); i++ {
        pattern += fmt.Sprintf("(?%d)", i)
    }

    return sequenceParts{
        seqPattern: pattern,
        seqEvents:  strings.Join(events, ", "),
    }
}

func buildOneSeriesSequence(filters []Filter) string {
    return strings.Join(buildFilterConditions(filters), " AND ")
}

func buildFilterConditions(filters []Filter) []string {
    var out []string
    for _, f := range filters {
        switch f.Type {
        case FilterClick:
            out = append(out,
                fmt.Sprintf(`(main."$event_name" = 'CLICK' AND JSONExtractString(toString(main."$properties"), 'label') IN ('%s'))`,
                    strings.Join(f.Value, "','")))
        case FilterInput:
            out = append(out,
                fmt.Sprintf(`(main."$event_name" = 'INPUT' AND JSONExtractString(toString(main."$properties"), 'label') IN ('%s'))`,
                    strings.Join(f.Value, "','")))
        // TODO: add more cases to cover all the events
        default:
            out = append(out,
                fmt.Sprintf(`(main."$event_name" = '%s')`, strings.ToUpper(string(f.Type))))
        }
    }
    return out
}

func main() {
    //input := GetPayload(MetricTypeTimeseries)
    input := GetPayload(MetricTypeTable)

    var payload MetricPayload
    err := json.Unmarshal([]byte(input), &payload)
    if err != nil {
        return
    }

    var qb NewQueryBuilder
    switch payload.MetricType {
    case MetricTypeTimeseries:
        qb = TimeSeriesQueryBuilder{}
    case MetricTypeFunnel:
        qb = FunnelQueryBuilder{}
    case MetricTypeTable:
        qb = TableQueryBuilder{}
    default:
        qb = TimeSeriesQueryBuilder{}
    }

    query := qb.Build(payload)
    fmt.Println(query)
}

func GetPayload(metricType MetricType) string {
    switch metricType {
    case MetricTypeTimeseries:
        return `{
  "startTimestamp": 1738796399999,
  "endTimestamp": 1739401199999,
  "density": 7,
  "metricOf": "sessionCount",
  "metricValue": [],
  "metricType": "timeseries",
  "metricFormat": "sessionCount",
  "viewType": "lineChart",
  "name": "Untitled Trend",
  "series": [
    {
      "name": "Series 1",
      "filter": {
        "filters": [
          { "type": "userId", "isEvent": false, "value": ["test@test.com"], "operator": "is", "filters": [] }
        ],
        "eventsOrder": "then"
      }
    }
  ]
}`
    case MetricTypeFunnel:
        return `{}`
    case MetricTypeTable:
        return `{
  "startTimestamp": 1737586800000,
  "endTimestamp": 1738277999999,
  "density": 7,
  "metricOf": "userDevice",
  "metricType": "table",
  "metricFormat": "sessionCount",
  "viewType": "table",
  "name": "Untitled Trend",
  "series": [
    {
      "name": "Series 1",
      "filter": {
        "filters": [
          { "type": "click", "isEvent": true, "value": ["Manuscripts"], "operator": "on", "filters": [] }
        ],
        "eventsOrder": "then"
      }
    },
    {
      "name": "Series 2",
      "filter": {
        "filters": [
          { "type": "input", "isEvent": true, "value": ["test"], "operator": "is", "filters": [] }
        ],
        "eventsOrder": "then"
      }
    }
  ],
  "page": 1,
  "limit": 20,
  "compareTo": null,
  "config": { "col": 2 }
}`
    default:
        return `{}`
    }
}
@ -1,252 +0,0 @@
package query

import (
    "fmt"
    "strings"
)

type TableQueryBuilder struct{}

func (t TableQueryBuilder) Build(p MetricPayload) string {
    return t.buildQuery(p)
}

func (t TableQueryBuilder) buildQuery(r MetricPayload) string {
    s := r.Series[0]
    sessionFilters, eventFilters := partitionFilters(s.Filter.Filters)
    sessionWhere := buildSessionWhere(sessionFilters)
    eventWhere, seqHaving := buildEventsWhere(eventFilters, s.Filter.EventsOrder)

    subQuery := fmt.Sprintf(
        "SELECT %s,\n"+
            "       MIN(%s) AS first_event_ts,\n"+
            "       MAX(%s) AS last_event_ts\n"+
            "FROM %s AS main\n"+
            "WHERE main.project_id = %%(project_id)s\n"+
            "  AND %s >= toDateTime(%%(start_time)s/1000)\n"+
            "  AND %s <= toDateTime(%%(end_time)s/1000)\n"+
            "  AND (%s)\n"+
            "GROUP BY %s\n"+
            "HAVING %s",
        ColEventSessionID,
        ColEventTime,
        ColEventTime,
        TableEvents,
        ColEventTime,
        ColEventTime,
        strings.Join(eventWhere, " OR "),
        ColEventSessionID,
        seqHaving,
    )

    joinQuery := fmt.Sprintf(
        "SELECT *\n"+
            "FROM %s AS s\n"+
            "INNER JOIN (\n"+
            "    SELECT DISTINCT ev.session_id, ev.`$current_url` AS url_path\n"+
            "    FROM %s AS ev\n"+
            "    WHERE ev.created_at >= toDateTime(%%(start_time)s/1000)\n"+
            "      AND ev.created_at <= toDateTime(%%(end_time)s/1000)\n"+
            "      AND ev.project_id = %%(project_id)s\n"+
            "      AND ev.`$event_name` = 'LOCATION'\n"+
            ") AS extra_event USING (session_id)\n"+
            "WHERE s.project_id = %%(project_id)s\n"+
            "  AND isNotNull(s.duration)\n"+
            "  AND s.datetime >= toDateTime(%%(start_time)s/1000)\n"+
            "  AND s.datetime <= toDateTime(%%(end_time)s/1000)\n",
        TableSessions,
        TableEvents,
    )

    if len(sessionWhere) > 0 {
        joinQuery += "  AND " + strings.Join(sessionWhere, " AND ") + "\n"
    }

    main := fmt.Sprintf(
        "SELECT s.session_id AS session_id, s.url_path\n"+
            "FROM (\n%s\n) AS f\n"+
            "INNER JOIN (\n%s) AS s\n"+
            "    ON (s.session_id = f.session_id)\n",
        subQuery,
        joinQuery,
    )

    final := fmt.Sprintf(
        "SELECT COUNT(DISTINCT url_path) OVER () AS main_count,\n"+
            "       url_path AS name,\n"+
            "       COUNT(DISTINCT session_id) AS total,\n"+
            "       COALESCE(SUM(COUNT(DISTINCT session_id)) OVER (), 0) AS total_count\n"+
            "FROM (\n%s) AS filtered_sessions\n"+
            "GROUP BY url_path\n"+
            "ORDER BY total DESC\n"+
            "LIMIT 200 OFFSET 0;",
        main,
    )

    return final
}

func partitionFilters(filters []Filter) (sessionFilters []Filter, eventFilters []Filter) {
    for _, f := range filters {
        if f.IsEvent {
            eventFilters = append(eventFilters, f)
        } else {
            sessionFilters = append(sessionFilters, f)
        }
    }
    return
}

func buildSessionWhere(filters []Filter) []string {
    var conds []string
    for _, f := range filters {
        switch f.Type {
        case FilterUserCountry:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCountry, concatValues(f.Value)))
        case FilterUserCity:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCity, concatValues(f.Value)))
        case FilterUserState:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserState, concatValues(f.Value)))
        case FilterUserId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserID, concatValues(f.Value)))
        case FilterUserAnonymousId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserAnonymousID, concatValues(f.Value)))
        case FilterUserOs:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserOS, concatValues(f.Value)))
        case FilterUserBrowser:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserBrowser, concatValues(f.Value)))
        case FilterUserDevice:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDevice, concatValues(f.Value)))
        case FilterPlatform:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDeviceType, concatValues(f.Value)))
        case FilterRevId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColRevID, concatValues(f.Value)))
        case FilterReferrer:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColBaseReferrer, concatValues(f.Value)))
        case FilterDuration:
            if len(f.Value) == 2 {
                conds = append(conds, fmt.Sprintf("%s >= '%s'", ColDuration, f.Value[0]))
                conds = append(conds, fmt.Sprintf("%s <= '%s'", ColDuration, f.Value[1]))
            }
        case FilterUtmSource:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmSource, concatValues(f.Value)))
        case FilterUtmMedium:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmMedium, concatValues(f.Value)))
        case FilterUtmCampaign:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmCampaign, concatValues(f.Value)))
        case FilterMetadata:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColMetadata1, concatValues(f.Value)))
        }
    }
    // Adding "\n" to each condition for readability; can be removed.
    for i := range conds {
        conds[i] += "\n"
    }
    return conds
}

func concatValues(v []string) string {
    return strings.Join(v, "")
}

func buildEventsWhere(filters []Filter, order EventOrder) (eventConditions []string, having string) {
    basicEventTypes := "(" +
        strings.Join([]string{
            fmt.Sprintf("%s = 'CLICK'", ColEventName),
            fmt.Sprintf("%s = 'INPUT'", ColEventName),
            fmt.Sprintf("%s = 'LOCATION'", ColEventName),
            fmt.Sprintf("%s = 'CUSTOM'", ColEventName),
            fmt.Sprintf("%s = 'REQUEST'", ColEventName),
        }, " OR ") + ")"

    var seq []string
    for _, f := range filters {
        switch f.Type {
        case FilterClick:
            seq = append(seq, seqCond("CLICK", "selector", f))
        case FilterInput:
            seq = append(seq, seqCond("INPUT", "label", f))
        case FilterLocation:
            seq = append(seq, seqCond("LOCATION", "url_path", f))
        case FilterCustom:
            seq = append(seq, seqCond("CUSTOM", "name", f))
        case FilterFetch:
            seq = append(seq, seqFetchCond("REQUEST", f))
        case FilterFetchStatusCode:
            seq = append(seq, seqCond("REQUEST", "status", f))
        default:
            seq = append(seq, fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(string(f.Type))))
        }
    }
    eventConditions = []string{basicEventTypes}

    // then => sequenceMatch
    // or   => OR
    // and  => AND
    switch order {
    case EventOrderThen:
        var pattern []string
        for i := range seq {
            pattern = append(pattern, fmt.Sprintf("(?%d)", i+1))
        }
        having = fmt.Sprintf("sequenceMatch('%s')(\n%s,\n%s)",
            strings.Join(pattern, ""), fmt.Sprintf("toUnixTimestamp(%s)", ColEventTime), strings.Join(seq, ",\n"))
    case EventOrderAnd:
        // build AND
        having = strings.Join(seq, " AND ")
    default:
        // default => OR
        var orParts []string
        for _, p := range seq {
            orParts = append(orParts, "("+p+")")
        }
        having = strings.Join(orParts, " OR ")
    }
    return
}

func seqCond(eventName, key string, f Filter) string {
    op := parseOperator(f.Operator)
    return fmt.Sprintf("(%s = '%s' AND JSONExtractString(toString(%s), '%s') %s '%s')",
        ColEventName, strings.ToUpper(eventName), ColEventProperties, key, op, concatValues(f.Value))
}

func seqFetchCond(eventName string, f Filter) string {
    w := []string{fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(eventName))}
    var extras []string
    for _, c := range f.Filters {
        switch c.Type {
        case FilterFetch:
            if len(c.Value) > 0 {
                extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventURLPath, concatValues(c.Value)))
            }
        case FilterFetchStatusCode:
            if len(c.Value) > 0 {
                extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventStatus, concatValues(c.Value)))
            }
        default:
            // placeholder if needed
        }
    }
    if len(extras) > 0 {
        w = append(w, strings.Join(extras, " AND "))
    }
    return "(" + strings.Join(w, " AND ") + ")"
}

func parseOperator(op string) string {
    // TODO: implement this properly
    switch strings.ToLower(op) {
    case OperatorStringContains:
        return "LIKE"
    case OperatorStringIs, OperatorStringOn, "=", OperatorStringOnAny:
        return "="
    case OperatorStringStartsWith:
        return "LIKE"
    case OperatorStringEndsWith:
        // might be interpreted differently in a real implementation
        return "="
    default:
        return "="
    }
}
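To make the "then" branch concrete, here is a small sketch that is not taken from the repository: it feeds two ordered events through buildEventsWhere, and the trailing comment shows the HAVING text the format strings above assemble for them (function name invented; assumes this package and its fmt import).

func sequenceHavingExample() {
    filters := []Filter{
        {Type: FilterClick, IsEvent: true, Value: []string{"Manuscripts"}, Operator: "on"},
        {Type: FilterInput, IsEvent: true, Value: []string{"test"}, Operator: "is"},
    }
    _, having := buildEventsWhere(filters, EventOrderThen)
    fmt.Println(having)
    // sequenceMatch('(?1)(?2)')(
    // toUnixTimestamp(main.created_at),
    // (main.`$event_name` = 'CLICK' AND JSONExtractString(toString(main.`$properties`), 'selector') = 'Manuscripts'),
    // (main.`$event_name` = 'INPUT' AND JSONExtractString(toString(main.`$properties`), 'label') = 'test'))
}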
@ -1,42 +0,0 @@
package query

import "fmt"

type TimeSeriesQueryBuilder struct{}

func (t TimeSeriesQueryBuilder) Build(p MetricPayload) string {
    switch p.MetricOf {
    case "sessionCount":
        return t.buildSessionCountQuery(p)
    case "userCount":
        return t.buildUserCountQuery(p)
    default:
        return ""
    }
}

func (TimeSeriesQueryBuilder) buildSessionCountQuery(p MetricPayload) string {
    subquery := buildEventSubquery(p)
    return fmt.Sprintf(`SELECT toUnixTimestamp(
           toStartOfInterval(processed_sessions.datetime, INTERVAL 115199 second)
       ) * 1000 AS timestamp,
       COUNT(processed_sessions.session_id) AS count
FROM (
%s
) AS processed_sessions
GROUP BY timestamp
ORDER BY timestamp;`, subquery)
}

func (TimeSeriesQueryBuilder) buildUserCountQuery(p MetricPayload) string {
    subquery := buildEventSubquery(p)
    return fmt.Sprintf(`SELECT toUnixTimestamp(
           toStartOfInterval(processed_sessions.datetime, INTERVAL 115199 second)
       ) * 1000 AS timestamp,
       COUNT(DISTINCT processed_sessions.user_id) AS count
FROM (
%s
) AS processed_sessions
GROUP BY timestamp
ORDER BY timestamp;`, subquery)
}
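The INTERVAL 115199 second above is hard-coded. It happens to equal the sample table payload's time range split into density-1 buckets, so a parameterized version would presumably look something like the sketch below; this is an assumption for illustration, not something the diff states, and the function name is invented.

func intervalSeconds(p MetricPayload) int64 {
    if p.Density <= 1 {
        return (p.EndTimestamp - p.StartTimestamp) / 1000
    }
    // e.g. (1738277999999 - 1737586800000) / (7 - 1) / 1000 = 115199
    return (p.EndTimestamp - p.StartTimestamp) / int64(p.Density-1) / 1000
}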
@ -108,15 +108,15 @@ func (c *connectorImpl) newBatch(name, query string) error {
var batches = map[string]string{
    "sessions":      "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?)",
    "autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
    "pages":         `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "clicks":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "inputs":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "errors":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "performance":   `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "requests":      `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "custom":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "graphql":       `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issuesEvents":  `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "pages":         `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "clicks":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "inputs":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "errors":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "performance":   `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "requests":      `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "custom":        `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "graphql":       `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issuesEvents":  `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issues":          "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
    "mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
    "mobile_custom":   `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
@ -251,7 +251,6 @@ func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *m
        "hesitation_time":  nullableUint32(uint32(msg.HesitationTime)),
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal input event: %s", err)

@ -268,13 +267,6 @@ func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *m
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        nullableUint16(uint16(msg.InputDuration)),
        jsonString,
    ); err != nil {

@ -299,7 +291,6 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
        "url_hostpath":     hostpath,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal issue event: %s", err)

@ -316,13 +307,6 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        "mouse_thrashing",
        issueID,
        jsonString,

@ -363,7 +347,6 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
        "url_hostpath":     hostpath,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal issue event: %s", err)

@ -380,13 +363,6 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        msg.Type,
        issueID,
        jsonString,

@ -460,7 +436,6 @@ func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messa
        "load_event_time":  loadEventTime,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal page event: %s", err)

@ -477,12 +452,6 @@ func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messa
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.URL),
        jsonString,
    ); err != nil {

@ -527,7 +496,6 @@ func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *mess
        "url_hostpath":     hostpath,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal click event: %s", err)

@ -544,12 +512,6 @@ func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *mess
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        jsonString,
    ); err != nil {

@ -573,7 +535,6 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
        "message":          msg.Message,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal error event: %s", err)

@ -590,13 +551,6 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        msgID,
        jsonString,
    ); err != nil {

@ -631,7 +585,6 @@ func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session,
        "max_used_js_heap_size": msg.MaxUsedJSHeapSize,
        "user_device":           session.UserDevice,
        "user_device_type":      session.UserDeviceType,
        "page_title ":           msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal performance event: %s", err)

@ -648,13 +601,6 @@ func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session,
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("performance", err)

@ -690,7 +636,6 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N
        "url_hostpath":     hostpath,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal request event: %s", err)

@ -707,13 +652,6 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.URL),
        nullableUint16(uint16(msg.Duration)),
        jsonString,
    ); err != nil {

@ -725,10 +663,10 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N

func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
    jsonString, err := json.Marshal(map[string]interface{}{
        "name":             msg.Name,
        "payload":          msg.Payload,
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal custom event: %s", err)

@ -738,20 +676,13 @@ func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.Cu
        session.SessionID,
        uint16(session.ProjectID),
        getUUID(msg),
        msg.Name,
        "CUSTOM",
        eventTime,
        eventTime.Unix(),
        session.UserUUID,
        false,
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("custom", err)

@ -767,7 +698,6 @@ func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.G
        "response_body":    nullableString(msg.Response),
        "user_device":      session.UserDevice,
        "user_device_type": session.UserDeviceType,
        "page_title ":      msg.PageTitle,
    })
    if err != nil {
        return fmt.Errorf("can't marshal graphql event: %s", err)

@ -784,13 +714,6 @@ func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.G
        true,
        session.Platform,
        session.UserOSVersion,
        session.UserOS,
        session.UserBrowser,
        session.Referrer,
        session.UserCountry,
        session.UserState,
        session.UserCity,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("graphql", err)
@ -84,10 +84,7 @@ func (p *poolImpl) Begin() (*Tx, error) {
    tx, err := p.conn.Begin(context.Background())
    p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "")
    p.metrics.IncreaseTotalRequests("begin", "")
    return &Tx{
        origTx:  tx,
        metrics: p.metrics,
    }, err
    return &Tx{tx, p.metrics}, err
}

func (p *poolImpl) Close() {

@ -97,31 +94,22 @@ func (p *poolImpl) Close() {
// TX - start

type Tx struct {
    origTx pgx.Tx
    pgx.Tx
    metrics database.Database
}

func (tx *Tx) TxExec(sql string, args ...interface{}) error {
    start := time.Now()
    _, err := tx.origTx.Exec(context.Background(), sql, args...)
    _, err := tx.Exec(context.Background(), sql, args...)
    method, table := methodName(sql)
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
    tx.metrics.IncreaseTotalRequests(method, table)
    return err
}

func (tx *Tx) TxQuery(sql string, args ...interface{}) (pgx.Rows, error) {
    start := time.Now()
    res, err := tx.origTx.Query(getTimeoutContext(), sql, args...)
    method, table := methodName(sql)
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
    tx.metrics.IncreaseTotalRequests(method, table)
    return res, err
}

func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
    start := time.Now()
    res := tx.origTx.QueryRow(context.Background(), sql, args...)
    res := tx.QueryRow(context.Background(), sql, args...)
    method, table := methodName(sql)
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
    tx.metrics.IncreaseTotalRequests(method, table)

@ -130,7 +118,7 @@ func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {

func (tx *Tx) TxRollback() error {
    start := time.Now()
    err := tx.origTx.Rollback(context.Background())
    err := tx.Rollback(context.Background())
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "")
    tx.metrics.IncreaseTotalRequests("rollback", "")
    return err

@ -138,7 +126,7 @@ func (tx *Tx) TxRollback() error {

func (tx *Tx) TxCommit() error {
    start := time.Now()
    err := tx.origTx.Commit(context.Background())
    err := tx.Commit(context.Background())
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "")
    tx.metrics.IncreaseTotalRequests("commit", "")
    return err
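With pgx.Tx embedded instead of held in an origTx field, callers can mix the instrumented helpers with the embedded pgx methods on the same value. A rough usage sketch, with the function and table name invented for illustration and assuming this package's context import:

func saveName(tx *Tx, name string) error {
    // Instrumented path: records duration and request count per method/table.
    if err := tx.TxExec("INSERT INTO users (name) VALUES ($1)", name); err != nil {
        _ = tx.TxRollback()
        return err
    }
    // The embedded pgx.Tx is still reachable for anything the wrapper does not cover.
    var n int
    if err := tx.QueryRow(context.Background(), "SELECT count(*) FROM users").Scan(&n); err != nil {
        _ = tx.TxRollback()
        return err
    }
    return tx.TxCommit()
}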
Some files were not shown because too many files have changed in this diff.