Compare commits

..

156 commits

Author SHA1 Message Date
nick-delirium
90510aa33b ui: fix double metric selection in list 2025-06-06 16:19:54 +02:00
GitHub Action
96a70f5d41 Increment frontend chart version to v1.22.42 2025-06-04 11:41:56 +02:00
rjshrjndrn
d4a13edcf0 fix(actions): frontend image with proper tag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-06-04 11:33:19 +02:00
GitHub Action
51fad91a22 Increment frontend chart version to v1.22.41 2025-06-04 10:48:50 +02:00
nick-delirium
36abcda1e1 ui: fix audioplayer start point 2025-06-04 10:39:08 +02:00
Mehdi Osman
dd5f464f73
Increment frontend chart version to v1.22.40 (#3479)
Co-authored-by: GitHub Action <action@github.com>
2025-06-03 16:22:12 +02:00
Delirium
f9ada41272
ui: recreate period on db visit (#3478) 2025-06-03 16:05:52 +02:00
rjshrjndrn
9e24a3583e feat(nginx): add integrations endpoint with CORS support
Add new /integrations/ location block that proxies requests to
integrations-openreplay:8080 service. Includes proper CORS headers
for cross-origin requests and WebSocket upgrade support.

- Rewrite /integrations/ path to root
- Configure proxy headers for forwarding
- Set connection timeouts for stability
- Add CORS headers for API access

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-06-02 10:55:50 +02:00
Taha Yassine Kraiem
0a3129d3cd fix(chalice): fixed JIRA integration 2025-05-30 15:25:41 +02:00
Mehdi Osman
99d61db9d9
Increment frontend chart version to v1.22.39 (#3460)
Co-authored-by: GitHub Action <action@github.com>
2025-05-30 15:07:29 +02:00
Delirium
133958622e
ui: fix alert create button (#3459) 2025-05-30 14:56:21 +02:00
GitHub Action
fb021f606f Increment frontend chart version to v1.22.38 2025-05-29 12:21:04 +02:00
rjshrjndrn
a2905fa8ed fix: move cd - command after git operations in patch workflow
Move the directory restoration command after the git operations to
ensure all git commands execute in the correct working directory
before returning to the previous directory.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 12:16:28 +02:00
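
For context, a rough sketch of the ordering this fix describes (the chart path and commit message here are illustrative, not taken from the workflow itself):

    cd scripts/helmcharts/openreplay/charts/frontend   # enter the chart directory
    git add Chart.yaml                                 # run the git operations here first
    git commit -m "Increment frontend chart version"
    cd -                                               # only then return to the previous directory
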
rjshrjndrn
beec2283fd refactor(ci): restructure patch-build workflow script
- Extract inline bash script into structured functions
- Add proper error handling with set -euo pipefail
- Improve variable scoping with readonly and local declarations
- Add descriptive function names and comments
- Fix shell quoting and parameter expansion
- Consolidate build logic into reusable functions
- Add proper cleanup of temporary files
- Improve readability and maintainability of the CI script

The refactored script maintains the same functionality while being
more robust and easier to understand.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 12:16:28 +02:00
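
A minimal sketch of the structure this refactor describes; the function and variable names below mirror the ones visible in the patch-build diff further down, but this is not the full script:

    #!/bin/bash
    set -euo pipefail                      # fail fast on errors, unset variables and pipe failures

    readonly WORKING_DIR=$(pwd)            # readonly: constant for the whole run

    setup_git() {
      git config --local user.email "action@github.com"
      git config --local user.name "GitHub Action"
    }

    build_service() {
      local service=$1 version=$2          # local: scoped to this function
      echo "Building ${service}:${version}"
    }

    main() {
      setup_git
      build_service "frontend" "1.22.38"
    }

    main "$@"
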
GitHub Action
6c8b55019e Increment frontend chart version 2025-05-29 10:29:46 +02:00
rjshrjndrn
e3e3e11227 fix(action): proper registry
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 10:18:55 +02:00
Shekar Siri
c6f7de04cc Revert "fix(ui): new card data state is not updating"
This reverts commit 2921c17cbf.
2025-05-28 22:16:00 +02:00
Shekar Siri
2921c17cbf fix(ui): new card data state is not updating 2025-05-28 19:49:01 +02:00
Mehdi Osman
7eb3f5c4c8
Increment frontend chart version (#3436)
Co-authored-by: GitHub Action <action@github.com>
2025-05-26 16:10:35 +02:00
Rajesh Rajendran
5a9a8e588a
chore(actions): rebase only if not main (#3435)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 16:04:50 +02:00
Rajesh Rajendran
4b14258266
fix(action): clone repo (#3433)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 15:50:13 +02:00
Rajesh Rajendran
744d2d4311
actions fix or 2070 (#3432)
* chore(build): Better error handling

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(build): remove fetch depth, as it might cause issue in rebase

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(build): proper platform

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 15:45:48 +02:00
Taha Yassine Kraiem
64242a5dc0 refactor(DB): changed supported platforms in CH 2025-05-26 11:51:49 +02:00
Rajesh Rajendran
cae3002697
feat(ci): Support building from branch for old patch (#3419)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-20 15:19:04 +02:00
GitHub Action
3d3c62196b Increment frontend chart version 2025-05-20 11:44:16 +02:00
nick-delirium
e810958a5d ui: fix ant imports 2025-05-20 11:26:20 +02:00
nick-delirium
39fa9787d1 ui: prevent network row modal from changing replayer time 2025-05-20 11:21:50 +02:00
nick-delirium
c9c1ad4dde ui: comments etc 2025-05-20 11:21:50 +02:00
nick-delirium
d9868928be ui: improve network panel row mapping 2025-05-20 11:21:50 +02:00
GitHub Action
a460d8c9a2 Increment frontend chart version 2025-05-15 15:18:19 +02:00
nick-delirium
930417aab4 ui: fix session search on url change 2025-05-15 15:12:30 +02:00
GitHub Action
07bc184f4d Increment chalice chart version 2025-05-14 18:59:43 +02:00
Rajesh Rajendran
71b7cca569
Patch/api v1.22.0 (#3401)
* fix(chalice): fixed duplicate autocomplete values

* ci(actions): possible fix for pull --rebase

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
Co-authored-by: Taha Yassine Kraiem <tahayk2@gmail.com>
2025-05-14 18:42:25 +02:00
Mehdi Osman
355d27eaa0
Increment frontend chart version (#3397)
Co-authored-by: GitHub Action <action@github.com>
2025-05-13 13:38:15 +02:00
Mehdi Osman
66b485cccf
Increment db chart version (#3396)
Co-authored-by: GitHub Action <action@github.com>
2025-05-13 10:34:28 +02:00
Alexander
de33a42151
feat(db): custom event's ts (#3395) 2025-05-12 17:52:24 +02:00
Rajesh Rajendran
f12bdebf82
ci(actions): fix push denied (#3392) (#3393) (#3394)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 17:19:41 +02:00
Rajesh Rajendran
bbfa20c693
ci(actions): fix push denied (#3392) (#3393)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:58:19 +02:00
Rajesh Rajendran
f264ba043d
ci(actions): fix push denied (#3392)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:55:23 +02:00
Rajesh Rajendran
a05dce8125
main (#3391)
* ci(actions): Update pr description

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* ci(actions): run only on pull request merge

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:50:20 +02:00
Mehdi Osman
3a1635d81f
Increment frontend chart version (#3389)
Co-authored-by: GitHub Action <action@github.com>
2025-05-12 16:12:43 +02:00
Delirium
ccb332c636
ui: change <slot> check (#3388) 2025-05-12 16:02:26 +02:00
Rajesh Rajendran
80ffa15959
ci(actions): Auto update tag for patch build (#3387)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 15:54:10 +02:00
Rajesh Rajendran
b2e961d621
ci(actions): Auto update tag for patch build (#3386)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 15:49:19 +02:00
Mehdi Osman
b4d0598f23
Increment frontend chart version (#3385)
Co-authored-by: GitHub Action <action@github.com>
2025-05-12 15:46:29 +02:00
Delirium
e77f083f10
ui: fixup toggler closing (#3384) 2025-05-12 15:40:30 +02:00
Delirium
58da1d3f64
fix litjs support, fix autocomplete modal options reset, fix dashboard chart density (#3382)
* Litjs fixes2 (#3381)

* ui: fixes for litjs capture

* ui: introduce vmode for lwc light dom

* ui: fixup the mode toggle and remover

* ui: fix filter options reset, fix dashboard chart density
2025-05-12 15:27:44 +02:00
GitHub Action
447fc26a2a Increment frontend chart version 2025-05-12 10:46:33 +02:00
nick-delirium
9bdf6e4f92 ui: fix heatmaps crash 2025-05-12 10:37:48 +02:00
GitHub Action
01f403e12d Increment chalice chart version 2025-05-07 12:28:44 +02:00
Taha Yassine Kraiem
39eb943b86 fix(chalice): fixed get error's details 2025-05-07 12:15:33 +02:00
GitHub Action
366b0d38b0 Increment frontend chart version 2025-05-06 16:28:28 +02:00
nick-delirium
f4d5b3c06e ui: fix max meta length, add horizontal layout for player 2025-05-06 16:23:47 +02:00
Mehdi Osman
93ae18133e
Increment frontend chart version (#3366)
Co-authored-by: GitHub Action <action@github.com>
2025-05-06 13:16:57 +02:00
Andrey Babushkin
fbe5d78270
Revert update (#3365)
* Revert "Increment chalice chart version"

This reverts commit 5e0e5730ba.

* revert updates

* changed chalice version
2025-05-06 13:08:08 +02:00
Mehdi Osman
b803eed1d4
Increment frontend chart version (#3362)
Co-authored-by: GitHub Action <action@github.com>
2025-05-05 17:49:39 +02:00
Andrey Babushkin
9ed3cb1b7e
Add searched events (#3361)
* add filtered events to search

* removed consoles

* changed styles to tailwind

* changed styles to tailwind

* fixed errors
2025-05-05 17:40:10 +02:00
GitHub Action
5e0e5730ba Increment chalice chart version 2025-05-05 17:04:29 +02:00
Taha Yassine Kraiem
d78b33dcd2 refactor(DB): remove TTL for CH tables 2025-05-05 16:49:37 +02:00
Taha Yassine Kraiem
4b1ca200b4 fix(chalice): fixed empty error_id for table of errors 2025-05-05 16:49:37 +02:00
rjshrjndrn
08d930f9ff fix(docker-compose): proper volume path #3279
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-28 17:28:40 +02:00
Mehdi Osman
da37809bc8
Increment frontend chart version (#3345)
Co-authored-by: GitHub Action <action@github.com>
2025-04-28 11:38:04 +02:00
Andrey Babushkin
d922fc7ad5
Patch frontend inline css (#3344)
* add inlineCss enum

* updated changelog
2025-04-28 11:29:53 +02:00
GitHub Action
796360fdd2 Increment frontend chart version 2025-04-28 11:01:55 +02:00
nick-delirium
13dbb60d8b ui: fix velement applychanges 2025-04-28 10:40:11 +02:00
Андрей Бабушкин
9e20a49128 add slot tag to custom elements 2025-04-28 10:34:43 +02:00
nick-delirium
91f8cc1399 ui: move debouncecall 2025-04-28 10:34:43 +02:00
Andrey Babushkin
f8ba3f6d89 Css batching (#3326)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* ui: rereoute custom html component fragments

* removed sorting

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Delirium
85e30b3692 tracker css batching/inlining (#3334)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* removed sorting

* upgrade css inliner

* ui: better logging for ocunter

* tracker: force-fetch mode for cssInliner

* tracker: fix ts warns

* tracker: use debug opts

* tracker: 16.2.0 changelogs, inliner opts

* tracker: remove debug options

---------

Co-authored-by: Андрей Бабушкин <andreybabushkin2000@gmail.com>
2025-04-28 10:34:43 +02:00
nick-delirium
0360e3726e ui: fixup autoplay on inactive tabs 2025-04-28 10:34:43 +02:00
nick-delirium
77bbb5af36 tracker: update css inject 2025-04-28 10:34:43 +02:00
Andrey Babushkin
ab0d4cfb62 Css inliner tuning (#3337)
* tracker: don't send double sheets

* tracker: don't send double sheets

* tracker: slot checker

* add slot tag to custom elements

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Andrey Babushkin
3fd506a812 Css batching (#3326)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* ui: rereoute custom html component fragments

* removed sorting

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Shekar Siri
e8432e2dec change(ui): force the table cards events order to use and istead the defaul then 2025-04-24 10:09:19 +02:00
GitHub Action
5c76a8524c Increment frontend chart version 2025-04-23 18:41:46 +02:00
rjshrjndrn
3ba40a4811 feat(cli): Add support for image versions
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
rjshrjndrn
f9a3f24590 fix(docker-compose): clickhouse migration
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
rjshrjndrn
85d6d0abac fix(docker-compose): remove shell interpolation
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
Rajesh Rajendran
b3594136ce or 1940 upstream docker release with the existing installation (#3316)
* chore(docker): Adding dynamic env generator
* ci(make): Create deployment yamls
* ci(make): Generating docker envs
* change env name structure
* proper env names
* chore(docker): clickhouse
* chore(docker-compose): generate env file format
* chore(docker-compose): Adding docker-compose
* chore(docker-compose): format make
* chore(docker-compose): Update version
* chore(docker-compose): adding new secrets
* ci(make): default target
* ci(Makefile): Update common protocol
* chore(docker-compose): refactor folder structure
* ci(make): rename to docker-envs
* feat(docker): add clickhouse volume definition
Add clickhouse persistent volume to the docker-compose configuration
to ensure data is preserved between container restarts.
* refactor: move env files to docker-envs directory
Updates all environment file references in docker-compose.yaml to use a
consistent directory structure, placing them under the docker-envs/
directory for better organization.
* fix(docker): rename imagestorage to images
 The `imagestorage` service and related environment file
 have been renamed to `images` for clarity and consistency.
 This change reflects the service's purpose of handling
 images.
* feat(docker): introduce docker-compose template
 A new docker-compose template
 to generate docker-compose files from a list of services.
 The template uses helm syntax.
* fix: Properly set FILES variable in Makefile
 The FILES variable was not being set correctly in the
 Makefile due to subshell issues. This commit fixes the
 variable assignment and ensures that the variable is
 accessible in subsequent commands.
* feat: Refactor docker-compose template for local development
 This commit introduces a complete overhaul of the
 docker-compose template, switching from a helm-based
 template to a native docker-compose.yml file. This
 change simplifies local development and makes it easier
 to manage the OpenReplay stack.
 The new template includes services for:
 - PostgreSQL
 - ClickHouse
 - Redis
 - MinIO
 - Nginx
 - Caddy
 It also includes migration jobs for setting up the
 database and MinIO.
* fix(docker-compose): Add fallback empty environment
 Add an empty environment to the docker-compose template to prevent
 errors when the env_file is missing. This ensures that the
 container can start even if the environment file is not present.
* feat(docker): Add domainname and aliases to services
 This change adds the `domainname` and `aliases` attributes to each
 service in the docker-compose.yaml file. This is to ensure that
 the services can communicate with each other using their fully
 qualified domain names. Also adds shared volume and empty
 environment variables.
* update version
* chore(docker): don't pull parallel
* chore(docker-compose): proper pull
* chore(docker-compose): Update db service urls
* fix(docker-compose): clickhouse url
* chore(clickhouse): Adding clickhouse db migration
* chore(docker-compose): Adding clickhouse
* fix(tpl): variable injection
* chore(fix): compose tpl variable rendering
* chore(docker-compose): Allow override pg variable
* chore(helm): remove assist-server
* chore(helm): pg integrations
* chore(nginx): removed services
* chore(docker-compose): Mulitple aliases
* chore(docker-compose): Adding more env vars
* feat(install): Dynamically generate passwords
 dynamic password generation by
 identifying `change_me_*` entries in `common.env` and
 replacing them with random passwords. This enhances
 security and simplifies initial setup.
 The changes include:
 - Replacing hardcoded password replacements with a loop
   that iterates through all `change_me_*` entries.
 - Using `grep` to find all `change_me_*` tokens.
 - Generating a random password for each token.
 - Updating the `common.env` file with the generated
   passwords.
* chore(docker-compose): disable clickhouse password
* fix(docker-compose): clickhouse-migration
* compose: chalice env
* chore(docker-compose): overlay vars
* chore(docker): Adding ch port
* chore(docker-compose): disable clickhouse password
* fix(docker-compose): migration name
* feat(docker): skip specific values
* chore(docker-compose): define namespace
---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
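
A minimal sketch of the change_me_* password substitution described in this PR; the common.env file name comes from the commit text, while the use of grep, openssl and sed below is an assumption about the implementation:

    # Replace every change_me_* placeholder in common.env with a random value.
    for token in $(grep -oE 'change_me_[A-Za-z0-9_]*' common.env | sort -u); do
      password=$(openssl rand -hex 16)          # assumed generator; any random source would do
      sed -i "s/${token}/${password}/g" common.env
    done
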
GitHub Action
8f67edde8d Increment chalice chart version 2025-04-23 12:26:20 +02:00
Taha Yassine Kraiem
74ed29915b fix(chalice): enforce AND operator for table of requests and table of pages 2025-04-23 11:51:38 +02:00
GitHub Action
3ca71ec211 Increment chalice chart version 2025-04-22 19:23:11 +02:00
Taha Yassine Kraiem
0e469fd056 fix(chalice): fixes for table of requests 2025-04-22 19:03:35 +02:00
KRAIEM Taha Yassine
a8cb0e1643 fix(chalice): fixes for table of requests 2025-04-22 19:03:35 +02:00
GitHub Action
e171f0d8d5 Increment frontend chart version 2025-04-22 17:56:00 +02:00
nick-delirium
68ea291444 ui: fix timepicker and timezone interactions 2025-04-22 17:42:56 +02:00
GitHub Action
05cbb831c7 Increment frontend chart version 2025-04-22 10:32:00 +02:00
nick-delirium
5070ded1f4 ui: fix empty sank sessions fetch 2025-04-22 10:27:16 +02:00
GitHub Action
77610a4924 Increment frontend chart version 2025-04-16 17:45:25 +02:00
nick-delirium
7c34e4a0f6 ui: virtualizer for filter options list 2025-04-16 17:36:34 +02:00
GitHub Action
330e21183f Increment frontend chart version 2025-04-15 18:25:49 +02:00
Shekar Siri
30ce37896c feat(widget-sessions): improve session filtering logic
- Refactored session filtering logic to handle nested filters properly.
- Enhanced `fetchSessions` to ensure null checks and avoid errors.
- Updated `loadData` to handle `USER_PATH` and `HEATMAP` metric types.
- Improved UI consistency by adjusting spacing and formatting.
- Replaced redundant code with cleaner, more maintainable patterns.

This change improves the reliability and readability of the session
filtering and loading logic in the WidgetSessions component.
2025-04-15 18:15:03 +02:00
Andrey Babushkin
80a7817e7d
removed sorting by id (#3305) 2025-04-15 13:32:53 +02:00
Jorgen Evens
1b9c568cb1 fix(helm): fix broken volumeMounts indentation 2025-04-14 15:51:41 +02:00
GitHub Action
3759771ae9 Increment frontend chart version 2025-04-14 12:06:09 +02:00
Shekar Siri
f6ae5aba88 feat(SessionsBy): add specific filter for FETCH metric
Added a conditional check to handle the FETCH metric in the SessionsBy
component. When the metric is FETCH, a specific filter with key
FETCH_URL, operator is, and value derived from data.name is applied.
This ensures proper filtering behavior for FETCH-related metrics.
2025-04-14 12:01:51 +02:00
Mehdi Osman
5190dc512a
Increment frontend chart version (#3297)
Co-authored-by: GitHub Action <action@github.com>
2025-04-14 11:54:25 +02:00
Andrey Babushkin
3fcccb51e8
Patch assist (#3296)
* add global method support

* fix errors

* remove wrong updates

* remove wrong updates

* add onDrag as option

* fix wrong updates
2025-04-14 11:33:06 +02:00
GitHub Action
26077d5689 Increment frontend chart version 2025-04-11 14:56:11 +02:00
Shekar Siri
00c57348fd feat(search): enhance filter value handling
- Added `checkFilterValue` function to validate and update filter values
  in `SearchStoreLive`.
- Updated `FilterItem` to handle undefined `value` gracefully by providing
  a default empty array.

These changes improve robustness in filter value processing.
2025-04-11 14:36:25 +02:00
Shekar Siri
1f9bc5520a feat(search): add rounding to next minutes for date ranges
- Introduced `roundToNextMinutes` utility function to round timestamps
  to the next specified minute interval.
- Updated `Search` class to use the rounding function for non-custom
  date ranges.
- Modified `getRange` in `period.js` to align LAST_24_HOURS with
  15-minute intervals.
- Added `roundToNextMinutes` implementation in `utils/index.ts`.
2025-04-11 12:01:15 +02:00
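
The rounding idea is simple ceiling arithmetic; shown here as a shell sketch because the actual roundToNextMinutes source is not part of this diff (the function name comes from the commit, the implementation below is an assumption):

    # Round a Unix timestamp (in seconds) up to the next N-minute boundary.
    # A timestamp already on a boundary is returned unchanged.
    round_to_next_minutes() {
      local ts=$1 minutes=$2
      local step=$((minutes * 60))
      echo $(( ((ts + step - 1) / step) * step ))
    }

    round_to_next_minutes 1745000123 15   # prints 1745001000
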
Shekar Siri
aef94618f6 Revert "Increment frontend chart version"
This reverts commit 2a330318c7.
2025-04-11 11:03:01 +02:00
GitHub Action
2a330318c7 Increment frontend chart version 2025-04-11 11:01:53 +02:00
Shekar Siri
6777d5ce2a feat(dashboard): set initial drill down period
Change default drill down period from LAST_7_DAYS to LAST_24_HOURS
and preserve current period when drilling down on chart click
2025-04-11 10:49:17 +02:00
rjshrjndrn
8a6f8fe91f chore(action): cloning specific tag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-10 15:45:50 +02:00
Mehdi Osman
7b078fed4c
Increment frontend chart version (#3278)
Co-authored-by: GitHub Action <action@github.com>
2025-04-07 15:24:32 +02:00
Andrey Babushkin
894d4c84b3
Patch assist canvas (#3277)
* resolved conflict

* removed comments
2025-04-07 15:13:36 +02:00
Alexander
46390a3ba9
feat(assist-server): added the github action (#3275) 2025-04-07 10:43:48 +02:00
rjshrjndrn
621667f5ce ci(action): Build and patch github tags
feat(workflow): update commit timestamp for patching

Add a step to set the commit timestamp of the HEAD commit to be 1
second newer than the oldest of the last 3 commits. This ensures
proper chronological order while preserving the commit content.

- Fetch deeper history to access commit history
- Get oldest timestamp from recent commits
- Set new commit date with BSD-compatible date command
- Verify timestamp change with git log

The workflow was previously checking out 'main' branch with a
comment indicating it needed to be fixed. This change makes it
properly checkout the tag specified by the workflow input.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-04 16:09:05 +02:00
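
The timestamp step described here appears in full in the patch-build-old workflow later in this diff; in short, the git side of it looks like this (a sketch, not the complete step):

    # Take the oldest of the last 3 commit timestamps and add one second.
    OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
    NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))

    # Convert it to a date git understands and amend HEAD with it.
    NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' "$NEW_TIMESTAMP")
    GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
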
rjshrjndrn
a72f476f1c chore(ci): tag patching
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-04 13:15:56 +02:00
Mehdi Osman
623946ce4e
Increment assist chart version (#3267)
Co-authored-by: GitHub Action <action@github.com>
2025-04-03 13:29:02 -04:00
Mehdi Osman
2d099214fc
Increment frontend chart version (#3266)
Co-authored-by: GitHub Action <action@github.com>
2025-04-03 18:27:05 +02:00
Andrey Babushkin
b0e7054f89
Assist patch canvas (#3265)
* add agent info to assist and tracker

* removed AGENTS_CONNECTED event
2025-04-03 18:22:08 +02:00
Mehdi Osman
a9097270af
Increment chalice chart version (#3260)
Co-authored-by: GitHub Action <action@github.com>
2025-04-02 16:43:46 +02:00
Alexander
5d514ddaf2
feat(chalice): added for_spot=True for authenticate_sso (#3259) 2025-04-02 16:35:19 +02:00
Mehdi Osman
43688bb03b
Increment assist chart version (#3256)
Co-authored-by: GitHub Action <action@github.com>
2025-04-01 16:04:41 +02:00
Mehdi Osman
e050cee7bb
Increment frontend chart version (#3255)
Co-authored-by: GitHub Action <action@github.com>
2025-03-31 18:19:52 +02:00
Andrey Babushkin
6b35df7125
pulled updates (#3254) 2025-03-31 18:13:51 +02:00
GitHub Action
8e099b6dc3 Increment frontend chart version 2025-03-31 17:25:58 +02:00
nick-delirium
c0a4734054 ui: fix double fetches for sessions 2025-03-31 17:19:33 +02:00
GitHub Action
7de1efb5fe Increment frontend chart version 2025-03-31 12:08:45 +02:00
nick-delirium
d4ff28ddbe ui: fix modules label 2025-03-31 11:54:13 +02:00
nick-delirium
b2256f72d0 ui: fix modules mapper 2025-03-31 11:48:14 +02:00
GitHub Action
a63bda1c79 Increment frontend chart version 2025-03-31 11:17:34 +02:00
nick-delirium
3a0176789e ui: filter keys 2025-03-31 10:34:02 +02:00
nick-delirium
f2b7271fca ui: add old devtool filters 2025-03-31 10:31:06 +02:00
GitHub Action
d50f89662b Increment frontend chart version 2025-03-28 21:37:59 +01:00
GitHub Action
35051d201c Increment assist chart version 2025-03-28 21:37:59 +01:00
rjshrjndrn
214be95ecc fix(init): remove duplicate clone
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-03-28 21:25:24 +01:00
Delirium
dbc142c114
UI patches (28.03) (#3231)
* ui: force getting url for location in tabmanagers

* Assist add turn servers (#3229)

* fixed conflicts

* add offers

* add config to sicket query

* add config to sicket query

* add config init

* removed console logs

* removed wrong updates

* fixed conflicts

* add offers

* add config to sicket query

* add config to sicket query

* add config init

* removed console logs

* removed wrong updates

* ui: fix chat draggable, fix default params

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>

* ui: fix spritemap generation for assist sessions

* ui: fix yarnlock

* fix errors

* updated widget link

* resolved conflicts

* updated widget url

---------

Co-authored-by: Andrey Babushkin <55714097+reyand43@users.noreply.github.com>
Co-authored-by: Андрей Бабушкин <andreybabushkin2000@gmail.com>
2025-03-28 17:32:12 +01:00
GitHub Action
443f5e8f08 Increment frontend chart version 2025-03-27 12:36:54 +01:00
Shekar Siri
9f693f220d refactor(auth): separate SSO support from enterprise edition
Add dedicated isSSOSupported property to correctly identify when SSO
authentication is available, properly handling the 'msaas' edition
case separately from enterprise edition checks. This fixes SSO
visibility in the login interface.
2025-03-27 12:28:10 +01:00
GitHub Action
5ab30380b0 Increment chalice chart version 2025-03-26 17:48:08 +01:00
Taha Yassine Kraiem
fc86555644 refactor(chalice): changed user-journey 2025-03-26 17:18:17 +01:00
GitHub Action
2a3c611a27 Increment frontend chart version 2025-03-26 16:48:29 +01:00
Delirium
1d6fb0ae9e ui: shrink icons when no space, adjust player area for events export … (#3217)
* ui: shrink icons when no space, adjust player area for events export panel, fix panel size

* ui: rm log
2025-03-26 16:38:48 +01:00
GitHub Action
bef91a6136 Increment frontend chart version 2025-03-25 18:15:34 +01:00
Shekar Siri
1e2bd19d32 fix(dashboard): update filter condition in MetricsList
Change the filter type comparison from checking against 'all' to
checking against an empty string. This ensures proper filtering
behavior when filtering metrics in the dashboard component.
2025-03-25 18:10:13 +01:00
rjshrjndrn
3b58cb347e chore(http): remove default token_string
scripts/helmcharts/openreplay/charts/http/scripts/entrypoint.sh

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-03-24 19:31:01 +01:00
GitHub Action
ca4590501a Increment frontend chart version 2025-03-24 17:45:24 +01:00
Andrey Babushkin
fd12cc7585
fix(GraphQL): remove unused useTranslation hook (#3200) (#3206)
Co-authored-by: PiRDub <pirddeveloppeur@gmail.com>
2025-03-24 17:38:45 +01:00
rjshrjndrn
6abded53e0 feat(helm): add TOKEN_SECRET environment variable
Add TOKEN_SECRET environment variable to HTTP service deployment and
generate a random value for it in vars.yaml.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-03-24 16:55:35 +01:00
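
A hypothetical illustration of generating such a value and writing it into vars.yaml; the key path and the use of openssl and yq here are assumptions, not taken from the chart:

    TOKEN_SECRET=$(openssl rand -hex 32)                                   # assumed generator
    yq eval ".http.env.TOKEN_SECRET = \"${TOKEN_SECRET}\"" -i vars.yaml    # assumed key path
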
GitHub Action
82c5e5e59d Increment frontend chart version 2025-03-24 14:34:51 +01:00
nick-delirium
c77b0cc4de ui: fixes for onboarding ui 2025-03-24 14:30:22 +01:00
nick-delirium
de344e62ef ui: onboarding fixes 2025-03-24 14:30:22 +01:00
Mehdi Osman
deb78a62c0
Increment frontend chart version (#3189)
Co-authored-by: GitHub Action <action@github.com>
2025-03-21 11:00:14 +01:00
Shekar Siri
0724cf05f0
fix(auth): remove unnecessary captcha token validation (#3188)
The token validation checks were redundant as the validation is already
handled by the captcha wrapper component. This change simplifies the
password reset flow while maintaining security.
2025-03-21 10:55:39 +01:00
GitHub Action
cc704f1bc3 Increment frontend chart version 2025-03-20 16:18:42 +01:00
nick-delirium
4c159b2d26 ui: fix table column export 2025-03-20 16:08:58 +01:00
Mehdi Osman
42df33bc01
Increment assist chart version (#3181)
Co-authored-by: GitHub Action <action@github.com>
2025-03-19 14:58:26 +01:00
Alexander
ae95b48760
feat(assist): improved caching mechanism for cluster mode (#3180) 2025-03-19 14:53:58 +01:00
Mehdi Osman
4be3050e61
Increment frontend chart version (#3179)
Co-authored-by: GitHub Action <action@github.com>
2025-03-19 14:47:37 +01:00
Shekar Siri
8eec6e983b
feat(auth): implement withCaptcha HOC for consistent reCAPTCHA (#3177)
* feat(auth): implement withCaptcha HOC for consistent reCAPTCHA

This commit refactors the reCAPTCHA implementation across the application
by introducing a Higher Order Component (withCaptcha) that encapsulates
captcha verification logic. The changes:

- Create a reusable withCaptcha HOC in withRecaptcha.tsx
- Refactor Login, ResetPasswordRequest, and CreatePassword components
- Extract SSOLogin into a separate component
- Improve error handling and user feedback
- Standardize loading and verification states across forms
- Make captcha implementation more maintainable and consistent

* feat(auth): support msaas edition for enterprise features

Add msaas to the isEnterprise check alongside ee edition to properly
display enterprise features. Use userStore.isEnterprise in SSOLogin
component instead of directly checking authDetails.edition for
consistent
enterprise status detection.
2025-03-19 14:36:56 +01:00
Taha Yassine Kraiem
5fec615044 refactor(chalice): cleaned code
fix(chalice): fixed session-search-pg sortKey issue
fix(chalice): fixed CH-query-formatter to handle special chars
fix(chalice): fixed /ids response
2025-03-18 13:51:10 +01:00
Mehdi Osman
f77568a01c
Increment frontend chart version (#3167)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 13:45:09 +01:00
Shekar Siri
618e4dc59f
refactor(searchStore): reformat filterMap function parameters (#3166)
- Reformat the parameters of the filterMap function for better readability.
- Comment out the fetchSessions call in clearSearch method to avoid unnecessary session fetch.
2025-03-15 11:42:14 +01:00
301 changed files with 6493 additions and 8502 deletions

.github/workflows/assist-server-ee.yaml (new file, 122 lines added)

@@ -0,0 +1,122 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
required: false
default: "false"
push:
branches:
- dev
paths:
- "ee/assist-server/**"
name: Build and Deploy Assist-Server EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- uses: ./.github/composite-actions/update-keys
with:
assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
assist_key: ${{ secrets.ASSIST_KEY }}
domain_name: ${{ secrets.EE_DOMAIN_NAME }}
jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
jwt_secret: ${{ secrets.EE_JWT_SECRET }}
jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
license_key: ${{ secrets.EE_LICENSE_KEY }}
minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
pg_password: ${{ secrets.EE_PG_PASSWORD }}
registry_url: ${{ secrets.OSS_REGISTRY_URL }}
name: Update Keys
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing Assist-Server image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd assist-server
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("assist-server")
for image in ${images[*]};do
./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("assist-server")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We've to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
pwd
cd scripts/helmcharts/
# Update changed image tag
sed -i "/assist-server/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mkdir -p /tmp/charts
mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
rm -rf openreplay/charts/*
mv /tmp/charts/* openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

.github/workflows/patch-build-old.yaml (new file, 189 lines added)

@@ -0,0 +1,189 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on:
workflow_dispatch:
inputs:
services:
description: 'Comma separated names of services to build(in small letters).'
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to update.'
required: true
type: string
branch:
description: 'Branch to build patches from. Make sure the branch is uptodate with tag. Else itll cause missing commits.'
required: true
type: string
name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
jobs:
deploy:
name: Build Patch from old tag
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 4
ref: ${{ github.event.inputs.tag }}
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Create backup tag with timestamp
run: |
set -e # Exit immediately if a command exits with a non-zero status
TIMESTAMP=$(date +%Y%m%d%H%M%S)
BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
echo "Created backup tag: $BACKUP_TAG"
# Get the oldest commit date from the last 3 commits in raw format
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
# Add 1 second to the timestamp
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
- name: Setup yq
uses: mikefarah/yq@master
# Configure AWS credentials for the first registry
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
id: login-ecr-arm
run: |
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
- name: Build
id: build-image
env:
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout $INPUT_TAG
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
done
- name: Change commit timestamp
run: |
# Convert the timestamp to a date format git can understand
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
echo "Setting commit date to: $NEW_DATE"
# Amend the commit with the new date
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
# Verify the change
git log -1 --pretty=format:"Commit now dated: %cD"
# git tag and push
git tag $INPUT_TAG -f
git push origin $INPUT_TAG -f
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
# MSAAS_REPO_FOLDER: /tmp/msaas
# with:
# limit-access-to-actor: true


@@ -2,7 +2,6 @@
 on:
   workflow_dispatch:
-    description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
     inputs:
       services:
         description: 'Comma separated names of services to build(in small letters).'
@@ -20,12 +19,20 @@
       DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
-          fetch-depth: 1
+          fetch-depth: 0
+          token: ${{ secrets.GITHUB_TOKEN }}
       - name: Rebase with main branch, to make sure the code has latest main changes
+        if: github.ref != 'refs/heads/main'
         run: |
-          git pull --rebase origin main
+          git remote -v
+          git config --global user.email "action@github.com"
+          git config --global user.name "GitHub Action"
+          git config --global rebase.autoStash true
+          git fetch origin main:main
+          git rebase main
+          git log -3
       - name: Downloading yq
         run: |
@@ -48,6 +55,8 @@
           aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
       - uses: depot/setup-action@v1
+        env:
+          DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
       - name: Get HEAD Commit ID
         run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
       - name: Define Branch Name
@@ -65,78 +74,168 @@
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }} MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }} MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas MSAAS_REPO_FOLDER: /tmp/msaas
SERVICES_INPUT: ${{ github.event.inputs.services }}
run: | run: |
set -exo pipefail #!/bin/bash
git config --local user.email "action@github.com" set -euo pipefail
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME # Configuration
working_dir=$(pwd) readonly WORKING_DIR=$(pwd)
function image_version(){ readonly BUILD_SCRIPT_NAME="build.sh"
local service=$1 readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path) # Initialize git configuration
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}') setup_git() {
echo $new_version git config --local user.email "action@github.com"
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path git config --local user.name "GitHub Action"
git checkout -b "$BRANCH_NAME"
} }
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || { # Get and increment image version
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER image_version() {
cd $MSAAS_REPO_FOLDER local service=$1
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
git log -1 local current_version new_version
cd $MSAAS_REPO_FOLDER
bash git-init.sh current_version=$(yq eval '.AppVersion' "$chart_path")
git checkout new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
} echo "$new_version"
} }
function build_managed() {
local service=$1 # Clone MSAAS repository if not exists
local version=$2 clone_msaas() {
echo building managed if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
clone_msaas git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
if [[ $service == 'chalice' ]]; then cd "$MSAAS_REPO_FOLDER"
cd $MSAAS_REPO_FOLDER/openreplay/api cd openreplay && git fetch origin && git checkout main
else git log -1
cd $MSAAS_REPO_FOLDER/openreplay/$service cd "$MSAAS_REPO_FOLDER"
fi bash git-init.sh
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt git checkout
fi
} }
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt # Build managed services
echo Services: "${{ github.event.inputs.services }}" build_managed() {
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}" local service=$1
BUILD_SCRIPT_NAME="build.sh" local version=$2
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do echo "Building managed service: $service"
# Check if service is backend clone_msaas
if grep -q $SERVICE /tmp/backend.txt; then
cd backend if [[ $service == 'chalice' ]]; then
foss_build_args="nil $SERVICE" cd "$MSAAS_REPO_FOLDER/openreplay/api"
ee_build_args="ee $SERVICE" else
else cd "$MSAAS_REPO_FOLDER/openreplay/$service"
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE fi
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee" local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
fi
version=$(image_version $SERVICE) echo "Executing: $build_cmd"
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args if ! eval "$build_cmd" 2>&1; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args echo "Build failed for $service"
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args exit 1
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args fi
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then }
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args # Build service with given arguments
else build_service() {
build_managed $SERVICE $version local service=$1
fi local version=$2
cd $working_dir local build_args=$3
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml" local build_script=${4:-$BUILD_SCRIPT_NAME}
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
git commit -m "Increment $SERVICE chart version" echo "Executing: $command"
git push --set-upstream origin $BRANCH_NAME eval "$command"
done }
# Update chart version and commit changes
update_chart_version() {
local service=$1
local version=$2
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
# Ensure we're in the original working directory/repository
cd "$WORKING_DIR"
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
git add "$chart_path"
git commit -m "Increment $service chart version to $version"
git push --set-upstream origin "$BRANCH_NAME"
cd -
}
# Main execution
main() {
setup_git
# Get backend services list
ls backend/cmd >"$BACKEND_SERVICES_FILE"
# Parse services input (fix for GitHub Actions syntax)
echo "Services: ${SERVICES_INPUT:-$1}"
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
# Process each service
for service in "${services[@]}"; do
echo "Processing service: $service"
cd "$WORKING_DIR"
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
# Determine build configuration based on service type
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
# Backend service
cd backend
foss_build_args="nil $service"
ee_build_args="ee $service"
else
# Non-backend service
case "$service" in
chalice | alerts | crons)
cd "$WORKING_DIR/api"
;;
*)
cd "$service"
;;
esac
# Special build scripts for alerts/crons
if [[ $service == 'alerts' || $service == 'crons' ]]; then
build_script="build_${service}.sh"
fi
ee_build_args="ee"
fi
# Get version and build
local version
version=$(image_version "$service")
# Build FOSS and EE versions
build_service "$service" "$version" "$foss_build_args"
build_service "$service" "${version}-ee" "$ee_build_args"
# Build managed version for specific services
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
echo "Nothing to build in managed for service $service"
else
build_managed "$service" "$version"
fi
# Update chart and commit
update_chart_version "$service" "$version"
done
cd "$WORKING_DIR"
# Cleanup
rm -f "$BACKEND_SERVICES_FILE"
}
echo "Working directory: $WORKING_DIR"
# Run main function with all arguments
main "$SERVICES_INPUT"
- name: Create Pull Request - name: Create Pull Request
uses: repo-sync/pull-request@v2 uses: repo-sync/pull-request@v2
@@ -147,8 +246,7 @@
           pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
           pr_body: |
             This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
-            Once this PR is merged, To update the latest tag, run the following workflow.
-            https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml
+            Once this PR is merged, tag update job will run automatically.
       # - name: Debug Job
       # if: ${{ failure() }}


@@ -18,127 +18,50 @@
- uses: oven-sh/setup-bun@v2 - uses: oven-sh/setup-bun@v2
with: with:
bun-version: latest bun-version: latest
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Cache tracker modules - name: Cache tracker modules
uses: actions/cache@v3 uses: actions/cache@v3
with: with:
path: tracker/node_modules path: tracker/tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }} key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
restore-keys: |
test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}-
- name: Cache tracker-assist modules
uses: actions/cache@v3
with:
path: tracker/tracker-assist/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
restore-keys: | restore-keys: |
test_tracker_build{{ runner.OS }}-build- test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}- test_tracker_build{{ runner.OS }}-
- name: Setup Testing packages - name: Setup Testing packages
run: | run: |
cd tracker cd tracker/tracker
bun install bun install
# Run tracker tests and generate coverage
- name: Jest tests - name: Jest tests
run: | run: |
cd tracker/tracker cd tracker/tracker
bun run test:ci bun run test:ci
- name: Building test - name: Building test
run: | run: |
cd tracker/tracker cd tracker/tracker
bun run build bun run build
- name: (TA) Setup Testing packages
# Run tracker-assist tests and generate coverage run: |
cd tracker/tracker-assist
bun install
- name: (TA) Jest tests - name: (TA) Jest tests
run: | run: |
cd tracker/tracker-assist cd tracker/tracker-assist
bun run test:ci bun run test:ci
- name: (TA) Building test - name: (TA) Building test
run: | run: |
cd tracker/tracker-assist cd tracker/tracker-assist
bun run build bun run build
- name: Upload coverage reports to Codecov
# For PRs, parse coverage and create comment uses: codecov/codecov-action@v3
- name: Parse Coverage Reports
if: github.event_name == 'pull_request'
id: coverage-report
run: |
echo "TRACKER_COVERAGE=$(cd tracker/tracker && jq -r '.total.lines.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TRACKER_STATEMENTS=$(cd tracker/tracker && jq -r '.total.statements.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TRACKER_BRANCHES=$(cd tracker/tracker && jq -r '.total.branches.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TRACKER_FUNCTIONS=$(cd tracker/tracker && jq -r '.total.functions.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TA_COVERAGE=$(cd tracker/tracker-assist && jq -r '.total.lines.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TA_STATEMENTS=$(cd tracker/tracker-assist && jq -r '.total.statements.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TA_BRANCHES=$(cd tracker/tracker-assist && jq -r '.total.branches.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "TA_FUNCTIONS=$(cd tracker/tracker-assist && jq -r '.total.functions.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
# For PRs, check out base branch for comparison
- name: Check out base branch for coverage comparison
if: github.event_name == 'pull_request'
uses: actions/checkout@v4
with: with:
ref: ${{ github.base_ref }} token: ${{ secrets.CODECOV_TOKEN }}
path: dev flags: tracker
iame: tracker
- name: Setup base branch for comparison
if: github.event_name == 'pull_request'
run: |
cd tracker/tracker
bun install
# Calculate coverage for base branch
- name: Get base branch coverage
if: github.event_name == 'pull_request'
id: base-coverage
run: |
cd tracker/tracker
bun run test:ci
echo "BASE_TRACKER_COVERAGE=$(jq -r '.total.lines.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TRACKER_STATEMENTS=$(jq -r '.total.statements.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TRACKER_BRANCHES=$(jq -r '.total.branches.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TRACKER_FUNCTIONS=$(jq -r '.total.functions.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
cd ../tracker-assist
bun run test:ci
echo "BASE_TA_COVERAGE=$(jq -r '.total.lines.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TA_STATEMENTS=$(jq -r '.total.statements.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TA_BRANCHES=$(jq -r '.total.branches.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
echo "BASE_TA_FUNCTIONS=$(jq -r '.total.functions.pct' coverage/coverage-summary.json)" >> $GITHUB_ENV
# Calculate coverage changes
- name: Calculate Coverage Changes
if: github.event_name == 'pull_request'
run: |
echo "TRACKER_COVERAGE_CHANGE=$(echo "$TRACKER_COVERAGE - $BASE_TRACKER_COVERAGE" | bc)" >> $GITHUB_ENV
echo "TRACKER_STATEMENTS_CHANGE=$(echo "$TRACKER_STATEMENTS - $BASE_TRACKER_STATEMENTS" | bc)" >> $GITHUB_ENV
echo "TRACKER_BRANCHES_CHANGE=$(echo "$TRACKER_BRANCHES - $BASE_TRACKER_BRANCHES" | bc)" >> $GITHUB_ENV
echo "TRACKER_FUNCTIONS_CHANGE=$(echo "$TRACKER_FUNCTIONS - $BASE_TRACKER_FUNCTIONS" | bc)" >> $GITHUB_ENV
echo "TA_COVERAGE_CHANGE=$(echo "$TA_COVERAGE - $BASE_TA_COVERAGE" | bc)" >> $GITHUB_ENV
echo "TA_STATEMENTS_CHANGE=$(echo "$TA_STATEMENTS - $BASE_TA_STATEMENTS" | bc)" >> $GITHUB_ENV
echo "TA_BRANCHES_CHANGE=$(echo "$TA_BRANCHES - $BASE_TA_BRANCHES" | bc)" >> $GITHUB_ENV
echo "TA_FUNCTIONS_CHANGE=$(echo "$TA_FUNCTIONS - $BASE_TA_FUNCTIONS" | bc)" >> $GITHUB_ENV
# Post coverage summary as PR comment
- name: Create Coverage PR Comment
if: github.event_name == 'pull_request'
uses: marocchino/sticky-pull-request-comment@v2
with:
header: coverage-report
message: |
## Code Coverage Report 📊
### Tracker
| Metric | Current | Base | Change |
| ------ | ------- | ---- | ------ |
| Lines | ${{ env.TRACKER_COVERAGE }}% | ${{ env.BASE_TRACKER_COVERAGE }}% | ${{ env.TRACKER_COVERAGE_CHANGE >= 0 && '+' || '' }}${{ env.TRACKER_COVERAGE_CHANGE }}% |
| Statements | ${{ env.TRACKER_STATEMENTS }}% | ${{ env.BASE_TRACKER_STATEMENTS }}% | ${{ env.TRACKER_STATEMENTS_CHANGE >= 0 && '+' || '' }}${{ env.TRACKER_STATEMENTS_CHANGE }}% |
| Branches | ${{ env.TRACKER_BRANCHES }}% | ${{ env.BASE_TRACKER_BRANCHES }}% | ${{ env.TRACKER_BRANCHES_CHANGE >= 0 && '+' || '' }}${{ env.TRACKER_BRANCHES_CHANGE }}% |
| Functions | ${{ env.TRACKER_FUNCTIONS }}% | ${{ env.BASE_TRACKER_FUNCTIONS }}% | ${{ env.TRACKER_FUNCTIONS_CHANGE >= 0 && '+' || '' }}${{ env.TRACKER_FUNCTIONS_CHANGE }}% |
### Tracker-Assist
| Metric | Current | Base | Change |
| ------ | ------- | ---- | ------ |
| Lines | ${{ env.TA_COVERAGE }}% | ${{ env.BASE_TA_COVERAGE }}% | ${{ env.TA_COVERAGE_CHANGE >= 0 && '+' || '' }}${{ env.TA_COVERAGE_CHANGE }}% |
| Statements | ${{ env.TA_STATEMENTS }}% | ${{ env.BASE_TA_STATEMENTS }}% | ${{ env.TA_STATEMENTS_CHANGE >= 0 && '+' || '' }}${{ env.TA_STATEMENTS_CHANGE }}% |
| Branches | ${{ env.TA_BRANCHES }}% | ${{ env.BASE_TA_BRANCHES }}% | ${{ env.TA_BRANCHES_CHANGE >= 0 && '+' || '' }}${{ env.TA_BRANCHES_CHANGE }}% |
| Functions | ${{ env.TA_FUNCTIONS }}% | ${{ env.BASE_TA_FUNCTIONS }}% | ${{ env.TA_FUNCTIONS_CHANGE >= 0 && '+' || '' }}${{ env.TA_FUNCTIONS_CHANGE }}% |
[View detailed coverage report](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
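The delta steps above compute metric-by-metric differences with jq and bc; as a minimal sketch of the same arithmetic (assuming the coverage-summary.json layout produced by the test runner, and hypothetical file paths), the comparison could be expressed in Python:

import json

def coverage_totals(summary_path: str) -> dict:
    # coverage-summary.json keeps per-metric percentages under the "total" key.
    with open(summary_path) as f:
        return json.load(f)["total"]

def coverage_deltas(current_path: str, base_path: str) -> dict:
    current, base = coverage_totals(current_path), coverage_totals(base_path)
    # Positive deltas get a leading "+", matching the PR comment table above.
    return {m: f"{current[m]['pct'] - base[m]['pct']:+.2f}%"
            for m in ("lines", "statements", "branches", "functions")}

# Hypothetical paths: PR head checkout vs. base branch checked out under dev/.
print(coverage_deltas("tracker/tracker/coverage/coverage-summary.json",
                      "dev/tracker/tracker/coverage/coverage-summary.json"))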

View file

@ -1,35 +1,42 @@
on: on:
workflow_dispatch: pull_request:
description: "This workflow will build for patches for latest tag, and will Always use commit from main branch." types: [closed]
inputs: branches:
services: - main
description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false" name: Release tag update --force
required: true
default: "false"
name: Force Push tag with main branch HEAD
jobs: jobs:
deploy: deploy:
name: Build Patch from main name: Build Patch from main
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v2 uses: actions/checkout@v2
- name: Get latest release tag using GitHub API
id: get-latest-tag
run: |
LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
"https://api.github.com/repos/${{ github.repository }}/releases/latest" \
| jq -r .tag_name)
# Fail if the API doesn't return a tag
if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
echo "Latest tag not found"
exit 100
fi
echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
echo "Latest tag: $LATEST_TAG"
- name: Set Remote with GITHUB_TOKEN - name: Set Remote with GITHUB_TOKEN
run: | run: |
git config --unset http.https://github.com/.extraheader git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
- name: Push main branch to tag - name: Push main branch to tag
run: | run: |
git fetch --tags
git checkout main git checkout main
git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
# - name: Debug Job git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# with:
# limit-access-to-actor: true
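The tag-update job above resolves the latest release through the GitHub REST API with curl and jq before force-pushing main onto it; a minimal sketch of that lookup in Python (assuming the requests package is available; the repository slug and token come from the standard Actions environment variables):

import os
import requests

def latest_release_tag(repo: str, token: str) -> str:
    # GET /repos/{owner}/{repo}/releases/latest and read tag_name, as the curl | jq step does.
    resp = requests.get(f"https://api.github.com/repos/{repo}/releases/latest",
                        headers={"Authorization": f"token {token}"},
                        timeout=10)
    resp.raise_for_status()
    tag = resp.json().get("tag_name")
    if not tag:
        raise RuntimeError("Latest tag not found")
    return tag

print(latest_release_tag(os.environ["GITHUB_REPOSITORY"], os.environ["GITHUB_TOKEN"]))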

1
.gitignore vendored
View file

@ -7,4 +7,3 @@ node_modules
**/*.envrc **/*.envrc
.idea .idea
*.mob* *.mob*
install-state.gz

View file

@ -6,15 +6,16 @@ name = "pypi"
[packages] [packages]
urllib3 = "==2.3.0" urllib3 = "==2.3.0"
requests = "==2.32.3" requests = "==2.32.3"
boto3 = "==1.37.21" boto3 = "==1.36.12"
pyjwt = "==2.10.1" pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10" psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"} psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.15" clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.2" elasticsearch = "==8.17.1"
jira = "==3.8.0" jira = "==3.8.0"
cachetools = "==5.5.2" cachetools = "==5.5.1"
fastapi = "==0.115.12" fastapi = "==0.115.8"
uvicorn = {extras = ["standard"], version = "==0.34.0"} uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8" python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"} pydantic = {extras = ["email"], version = "==2.10.6"}

View file

@ -16,7 +16,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client, ch_client from chalicelib.utils import pg_client, ch_client
from crons import core_crons, core_dynamic_crons from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
loglevel = config("LOGLEVEL", default=logging.WARNING) loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}") print(f">Loglevel set to: {loglevel}")
@ -129,6 +129,6 @@ app.include_router(spot.public_app)
app.include_router(spot.app) app.include_router(spot.app)
app.include_router(spot.app_apikey) app.include_router(spot.app_apikey)
app.include_router(product_analytics.public_app, prefix="/pa") app.include_router(product_anaytics.public_app)
app.include_router(product_analytics.app, prefix="/pa") app.include_router(product_anaytics.app)
app.include_router(product_analytics.app_apikey, prefix="/pa") app.include_router(product_anaytics.app_apikey)
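The hunk above mounts the product_analytics routers with prefix="/pa"; a minimal FastAPI sketch (with a hypothetical router and route) of what the prefix argument does to the served paths:

from fastapi import APIRouter, FastAPI

app = FastAPI()
router = APIRouter()

@router.get("/events")
def list_events():
    return []

# Without a prefix the endpoint is served at /events;
# include_router(..., prefix="/pa") moves it to /pa/events.
app.include_router(router, prefix="/pa")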

View file

@ -85,7 +85,8 @@ def __generic_query(typename, value_length=None):
ORDER BY value""" ORDER BY value"""
if value_length is None or value_length > 2: if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type return f"""SELECT DISTINCT ON(value,type) value, type
((SELECT DISTINCT value, type
FROM {TABLE} FROM {TABLE}
WHERE WHERE
project_id = %(project_id)s project_id = %(project_id)s
@ -101,7 +102,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}' AND type='{typename.upper()}'
AND value ILIKE %(value)s AND value ILIKE %(value)s
ORDER BY value ORDER BY value
LIMIT 5);""" LIMIT 5)) AS raw;"""
return f"""SELECT DISTINCT value, type return f"""SELECT DISTINCT value, type
FROM {TABLE} FROM {TABLE}
WHERE WHERE
@ -326,7 +327,7 @@ def __search_metadata(project_id, value, key=None, source=None):
AND {colname} ILIKE %(svalue)s LIMIT 5)""") AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\ cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})) "svalue": helper.string_to_sql_like("^" + value)}))

View file

@ -338,14 +338,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
SELECT details.error_id as error_id, SELECT details.error_id as error_id,
name, message, users, total, name, message, users, total,
sessions, last_occurrence, first_occurrence, chart sessions, last_occurrence, first_occurrence, chart
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id, FROM (SELECT error_id,
JSONExtractString(toString(`$properties`), 'name') AS name, JSONExtractString(toString(`$properties`), 'name') AS name,
JSONExtractString(toString(`$properties`), 'message') AS message, JSONExtractString(toString(`$properties`), 'message') AS message,
COUNT(DISTINCT user_id) AS users, COUNT(DISTINCT user_id) AS users,
COUNT(DISTINCT events.session_id) AS sessions, COUNT(DISTINCT events.session_id) AS sessions,
MAX(created_at) AS max_datetime, MAX(created_at) AS max_datetime,
MIN(created_at) AS min_datetime, MIN(created_at) AS min_datetime,
COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id')) COUNT(DISTINCT error_id)
OVER() AS total OVER() AS total
FROM {MAIN_EVENTS_TABLE} AS events FROM {MAIN_EVENTS_TABLE} AS events
INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
@ -357,7 +357,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id, name, message GROUP BY error_id, name, message
ORDER BY {sort} {order} ORDER BY {sort} {order}
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id, INNER JOIN (SELECT error_id,
toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence, toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
FROM {MAIN_EVENTS_TABLE} FROM {MAIN_EVENTS_TABLE}
@ -366,7 +366,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id) AS time_details GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id, FROM (SELECT error_id,
gs.generate_series AS timestamp, gs.generate_series AS timestamp,
COUNT(DISTINCT session_id) AS count COUNT(DISTINCT session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs

View file

@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
cur.execute( cur.execute(
cur.mogrify( cur.mogrify(
"""SELECT username, token, url """SELECT username, token, url
FROM public.jira_cloud FROM public.jira_cloud
WHERE user_id=%(user_id)s;""", WHERE user_id = %(user_id)s;""",
{"user_id": self._user_id}) {"user_id": self._user_id})
) )
data = helper.dict_to_camel_case(cur.fetchone()) data = helper.dict_to_camel_case(cur.fetchone())
@ -95,10 +95,9 @@ class JIRAIntegration(base.BaseIntegration):
def add(self, username, token, url, obfuscate=False): def add(self, username, token, url, obfuscate=False):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute( cur.execute(
cur.mogrify("""\ cur.mogrify(""" \
INSERT INTO public.jira_cloud(username, token, user_id,url) INSERT INTO public.jira_cloud(username, token, user_id, url)
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s) VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
RETURNING username, token, url;""",
{"user_id": self._user_id, "username": username, {"user_id": self._user_id, "username": username,
"token": token, "url": url}) "token": token, "url": url})
) )
@ -112,9 +111,10 @@ class JIRAIntegration(base.BaseIntegration):
def delete(self): def delete(self):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute( cur.execute(
cur.mogrify("""\ cur.mogrify(""" \
DELETE FROM public.jira_cloud DELETE
WHERE user_id=%(user_id)s;""", FROM public.jira_cloud
WHERE user_id = %(user_id)s;""",
{"user_id": self._user_id}) {"user_id": self._user_id})
) )
return {"state": "success"} return {"state": "success"}
@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
changes={ changes={
"username": data.username, "username": data.username,
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
else self.integration.token, else self.integration["token"],
"url": str(data.url) "url": str(data.url)
}, },
obfuscate=True obfuscate=True
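The update path above keeps the stored JIRA token whenever the submitted value is empty or contains the obfuscation marker "***"; a minimal sketch of that rule, with hypothetical names:

def resolve_token(submitted: str, stored: str) -> str:
    # Only accept the submitted value when it is non-empty and not obfuscated.
    if len(submitted) > 0 and submitted.find("***") == -1:
        return submitted
    return stored

assert resolve_token("new-secret", "old-secret") == "new-secret"
assert resolve_token("ab***yz", "old-secret") == "old-secret"
assert resolve_token("", "old-secret") == "old-secret"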

View file

@ -241,25 +241,3 @@ def get_colname_by_key(project_id, key):
return None return None
return index_to_colname(meta_keys[key]) return index_to_colname(meta_keys[key])
def get_for_filters(project_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query)
metas = cur.fetchone()
results = []
if metas is not None:
for i, k in enumerate(metas.keys()):
if metas[k] is not None:
results.append({"id": f"meta_{i}",
"name": k,
"displayName": metas[k],
"possibleTypes": ["String"],
"autoCaptured": False,
"icon": None})
return {"total": len(results), "list": results}

View file

@ -6,7 +6,7 @@ from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh from chalicelib.utils import sql_helper as sh
def filter_stages(stages: List[schemas.SessionSearchEventSchema]): def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT, ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
schemas.EventType.LOCATION, schemas.EventType.CUSTOM, schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE, schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
def __parse_events(f_events: List[dict]): def __parse_events(f_events: List[dict]):
return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events] return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]): def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
if f_events is None: if f_events is None:
return return
events = [] events = []

View file

@ -160,7 +160,7 @@ s.start_ts,
s.duration""" s.duration"""
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int, def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
start_time: int, start_time: int,
end_time: int) -> str | None: end_time: int) -> str | None:
full_args = { full_args = {
@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
value=[schemas.PlatformType.DESKTOP], value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS)) operator=schemas.SearchEventOperator.IS))
if not location_condition: if not location_condition:
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION, data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[], value=[],
operator=schemas.SearchEventOperator.IS_ANY)) operator=schemas.SearchEventOperator.IS_ANY))
if no_click: if no_click:
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK, data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
value=[], value=[],
operator=schemas.SearchEventOperator.IS_ANY)) operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0], value=[0],

View file

@ -24,9 +24,8 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
"main_events.`$event_name` = 'CLICK'", "main_events.`$event_name` = 'CLICK'",
"isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))" "isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
] ]
if data.operator == schemas.SearchEventOperator.PATTERN:
constraints.append("match(main_events.`$properties`.url_path'.:String,%(url)s)") if data.operator == schemas.SearchEventOperator.IS:
elif data.operator == schemas.SearchEventOperator.IS:
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s") constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
else: else:
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s") constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")
@ -180,7 +179,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
s.duration AS duration""" s.duration AS duration"""
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int, def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
start_time: int, start_time: int,
end_time: int) -> str | None: end_time: int) -> str | None:
full_args = { full_args = {
@ -263,13 +262,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
value=[schemas.PlatformType.DESKTOP], value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS)) operator=schemas.SearchEventOperator.IS))
if not location_condition: if not location_condition:
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION, data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[], value=[],
operator=schemas.SearchEventOperator.IS_ANY)) operator=schemas.SearchEventOperator.IS_ANY))
if no_click: if no_click:
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK, data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
value=[], value=[],
operator=schemas.SearchEventOperator.IS_ANY)) operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0], value=[0],

View file

@ -241,7 +241,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
:return: :return:
""" """
stages: List[schemas.SessionSearchEventSchema] = filter_d.events stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
stage_constraints = ["main.timestamp <= %(endTimestamp)s"] stage_constraints = ["main.timestamp <= %(endTimestamp)s"]

View file

@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext, def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]: metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
stages: List[schemas.SessionSearchEventSchema] = filter_d.events stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
platform = project.platform platform = project.platform
constraints = ["e.project_id = %(project_id)s", constraints = ["e.project_id = %(project_id)s",

View file

@ -0,0 +1,14 @@
from chalicelib.utils.ch_client import ClickHouseClient
def search_events(project_id: int, data: dict):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT *
FROM taha.events
WHERE project_id=%(project_id)s
ORDER BY created_at;""",
params={"project_id": project_id})
x = ch_client.execute(r)
return x

View file

@ -1,139 +0,0 @@
import logging
import schemas
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.ch_client import ClickHouseClient
from chalicelib.utils.exp_ch_helper import get_sub_condition
logger = logging.getLogger(__name__)
def get_events(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT DISTINCT ON(event_name,auto_captured)
COUNT(1) OVER () AS total,
event_name AS name, display_name, description,
auto_captured
FROM product_analytics.all_events
WHERE project_id=%(project_id)s
ORDER BY auto_captured,display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
rows = ch_client.execute(r)
if len(rows) == 0:
return {"total": 0, "list": []}
total = rows[0]["total"]
for i, row in enumerate(rows):
row["id"] = f"event_{i}"
row["icon"] = None
row["possibleTypes"] = ["string"]
row.pop("total")
return {"total": total, "list": helper.list_to_camel_case(rows)}
def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
with ClickHouseClient() as ch_client:
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
"projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
constraints = ["project_id = %(projectId)s",
"created_at >= toDateTime(%(startDate)s/1000)",
"created_at <= toDateTime(%(endDate)s/1000)"]
ev_constraints = []
for i, f in enumerate(data.filters):
if not f.is_event:
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
if f.is_predefined:
column = f.name
else:
column = f"properties.{f.name}"
if is_any:
condition = f"notEmpty{column})"
elif is_undefined:
condition = f"empty({column})"
else:
condition = sh.multi_conditions(
get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
values=f.value, value_key=f_k)
constraints.append(condition)
else:
e_k = f"e_value{i}"
full_args = {**full_args, e_k: f.name}
condition = f"`$event_name` = %({e_k})s"
sub_conditions = []
for j, ef in enumerate(f.properties.filters):
p_k = f"e_{i}_p_{j}"
full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k)}
if ef.is_predefined:
sub_condition = get_sub_condition(col_name=ef.name, val_name=p_k, operator=ef.operator)
else:
sub_condition = get_sub_condition(col_name=f"properties.{ef.name}",
val_name=p_k, operator=ef.operator)
sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
if len(sub_conditions) > 0:
condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"
ev_constraints.append(condition)
constraints.append("(" + " OR ".join(ev_constraints) + ")")
query = ch_client.format(
f"""SELECT COUNT(1) OVER () AS total,
event_id,
`$event_name`,
created_at,
`distinct_id`,
`$browser`,
`$import`,
`$os`,
`$country`,
`$state`,
`$city`,
`$screen_height`,
`$screen_width`,
`$source`,
`$user_id`,
`$device`
FROM product_analytics.events
WHERE {" AND ".join(constraints)}
ORDER BY created_at
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters=full_args)
rows = ch_client.execute(query)
if len(rows) == 0:
return {"total": 0, "rows": [], "src": 2}
total = rows[0]["total"]
for r in rows:
r.pop("total")
return {"total": total, "rows": rows, "src": 2}
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
all_events.event_name AS name,
*
FROM product_analytics.all_events
WHERE project_id=%(project_id)s
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
rows = ch_client.execute(r)
if len(rows) == 0:
return {"total": 0, "list": []}
total = rows[0]["total"]
for i, row in enumerate(rows):
row["id"] = f"event_{i}"
row["icon"] = None
row["possibleTypes"] = ["string"]
row.pop("total")
return {"total": total, "list": helper.list_to_camel_case(rows)}

View file

@ -1,83 +0,0 @@
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.ch_client import ClickHouseClient
import schemas
def get_all_properties(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
property_name AS name, display_name,
array_agg(DISTINCT event_properties.value_type) AS possible_types
FROM product_analytics.all_properties
LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
WHERE all_properties.project_id=%(project_id)s
GROUP BY property_name,display_name
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id,
"limit": page.limit,
"offset": (page.page - 1) * page.limit})
properties = ch_client.execute(r)
if len(properties) == 0:
return {"total": 0, "list": []}
total = properties[0]["total"]
properties = helper.list_to_camel_case(properties)
for i, p in enumerate(properties):
p["id"] = f"prop_{i}"
p["icon"] = None
p["possibleTypes"] = exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])
p.pop("total")
return {"total": total, "list": properties}
def get_event_properties(project_id: int, event_name):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT all_properties.property_name,
all_properties.display_name
FROM product_analytics.event_properties
INNER JOIN product_analytics.all_properties USING (property_name)
WHERE event_properties.project_id=%(project_id)s
AND all_properties.project_id=%(project_id)s
AND event_properties.event_name=%(event_name)s
ORDER BY created_at;""",
parameters={"project_id": project_id, "event_name": event_name})
properties = ch_client.execute(r)
return helper.list_to_camel_case(properties)
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
all_properties.property_name AS name,
all_properties.*,
possible_types.values AS possible_types,
possible_values.values AS sample_values
FROM product_analytics.all_properties
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS values
FROM product_analytics.event_properties
WHERE project_id=%(project_id)s
GROUP BY 1, 2) AS possible_types
USING (project_id, property_name)
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS values
FROM product_analytics.property_values_samples
WHERE project_id=%(project_id)s
GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
WHERE project_id=%(project_id)s
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id,
"limit": page.limit,
"offset": (page.page - 1) * page.limit})
properties = ch_client.execute(r)
if len(properties) == 0:
return {"total": 0, "list": []}
total = properties[0]["total"]
for i, p in enumerate(properties):
p["id"] = f"prop_{i}"
p["icon"] = None
p.pop("total")
return {"total": total, "list": helper.list_to_camel_case(properties)}

View file

@ -6,18 +6,8 @@ logger = logging.getLogger(__name__)
from . import sessions_pg from . import sessions_pg
from . import sessions_pg as sessions_legacy from . import sessions_pg as sessions_legacy
from . import sessions_ch from . import sessions_ch
from . import sessions_search_pg
from . import sessions_search_pg as sessions_search_legacy
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): if config("EXP_METRICS", cast=bool, default=False):
logger.info(">>> Using experimental sessions search")
from . import sessions_ch as sessions from . import sessions_ch as sessions
from . import sessions_search_ch as sessions_search
else: else:
from . import sessions_pg as sessions from . import sessions_pg as sessions
from . import sessions_search_pg as sessions_search
# if config("EXP_METRICS", cast=bool, default=False):
# from . import sessions_ch as sessions
# else:
# from . import sessions_pg as sessions

View file

@ -6,7 +6,6 @@ from chalicelib.core import events, metadata
from . import performance_event, sessions_legacy from . import performance_event, sessions_legacy
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh from chalicelib.utils import sql_helper as sh
from chalicelib.utils.exp_ch_helper import get_sub_condition
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -49,8 +48,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
query = f"""SELECT gs.generate_series AS timestamp, query = f"""SELECT gs.generate_series AS timestamp,
COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id, LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id), s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
s.user_anonymous_id, toString(s.user_uuid)) AS user_id, s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
s.datetime AS datetime s.datetime AS datetime
{query_part}) AS processed_sessions ON(TRUE) {query_part}) AS processed_sessions ON(TRUE)
@ -149,12 +148,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events: for e in data.events:
if e.type == schemas.EventType.LOCATION: if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions: if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({ extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type, "type": e.type,
"isEvent": True, "isEvent": True,
"value": [], "value": [],
"operator": e.operator, "operator": e.operator,
"filters": [] "filters": e.filters
}) })
for v in e.value: for v in e.value:
if v not in extra_conditions[e.operator].value: if v not in extra_conditions[e.operator].value:
@ -174,12 +173,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events: for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS: if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions: if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({ extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type, "type": e.type,
"isEvent": True, "isEvent": True,
"value": [], "value": [],
"operator": e.operator, "operator": e.operator,
"filters": [] "filters": e.filters
}) })
for v in e.value: for v in e.value:
if v not in extra_conditions[e.operator].value: if v not in extra_conditions[e.operator].value:
@ -254,7 +253,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
FROM (SELECT s.user_id AS user_id {extra_col} FROM (SELECT s.user_id AS user_id {extra_col}
{query_part} {query_part}
WHERE isNotNull(user_id) WHERE isNotNull(user_id)
AND notEmpty(user_id)) AS filtred_sessions AND user_id != '') AS filtred_sessions
{extra_where} {extra_where}
GROUP BY {main_col} GROUP BY {main_col}
ORDER BY total DESC ORDER BY total DESC
@ -278,7 +277,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions return sessions
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema): def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS, return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \ schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@ -331,11 +330,7 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt" extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s" condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
else: else:
# condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s" condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
condition = get_sub_condition(
col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
val_name=value_key, operator=op
)
conditions.append(sh.multi_conditions(condition, values, value_key=value_key)) conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
@ -665,8 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value = helper.values_for_operator(value=event.value, op=event.operator) event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args, full_args = {**full_args,
**sh.multi_values(event.value, value_key=e_k), **sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k), **sh.multi_values(event.source, value_key=s_k)}
e_k: event.value[0] if len(event.value) > 0 else event.value}
if event_type == events.EventType.CLICK.ui_type: if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -677,44 +671,24 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]}) events_conditions.append({"type": event_where[-1]})
if not is_any: if not is_any:
if schemas.ClickEventExtraOperator.has_value(event.operator): if schemas.ClickEventExtraOperator.has_value(event.operator):
# event_where.append(json_condition( event_where.append(json_condition(
# "main", "main",
# "$properties", "$properties",
# "selector", op, event.value, e_k) "selector", op, event.value, e_k)
# )
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"main.`$properties`.selector",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
) )
events_conditions[-1]["condition"] = event_where[-1] events_conditions[-1]["condition"] = event_where[-1]
else: else:
if is_not: if is_not:
# event_where.append(json_condition( event_where.append(json_condition(
# "sub", "$properties", _column, op, event.value, e_k "sub", "$properties", _column, op, event.value, e_k
# )) ))
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions_not.append( events_conditions_not.append(
{ {
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'" "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
}
)
events_conditions_not[-1]["condition"] = event_where[-1] events_conditions_not[-1]["condition"] = event_where[-1]
else: else:
# event_where.append(
# json_condition("main", "$properties", _column, op, event.value, e_k)
# )
event_where.append( event_where.append(
sh.multi_conditions( json_condition("main", "$properties", _column, op, event.value, e_k)
get_sub_condition(col_name=f"main.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
) )
events_conditions[-1]["condition"] = event_where[-1] events_conditions[-1]["condition"] = event_where[-1]
else: else:
@ -896,15 +870,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = [] events_conditions[-1]["condition"] = []
if not is_any and event.value not in [None, "*", ""]: if not is_any and event.value not in [None, "*", ""]:
event_where.append( event_where.append(
sh.multi_conditions( sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)", event.value, value_key=e_k))
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}" events_extra_join += f" AND {event_where[-1]}"
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]: if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append( event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
value_key=s_k))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}" events_extra_join += f" AND {event_where[-1]}"
@ -1137,8 +1108,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
is_any = sh.isAny_opreator(f.operator) is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0: if is_any or len(f.value) == 0:
continue continue
is_negative_operator = sh.is_negation_operator(f.operator)
f.value = helper.values_for_operator(value=f.value, op=f.operator) f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator) op = sh.get_sql_operator(f.operator)
r_op = ""
if is_negative_operator:
r_op = sh.reverse_sql_operator(op)
e_k_f = e_k + f"_fetch{j}" e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL: if f.type == schemas.FetchFilterType.FETCH_URL:
@ -1147,6 +1122,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)) ))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
apply = True apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE: elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(json_condition( event_where.append(json_condition(
"main", "$properties", 'status', op, f.value, e_k_f, True, True "main", "$properties", 'status', op, f.value, e_k_f, True, True
@ -1159,6 +1140,13 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)) ))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
apply = True apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_DURATION: elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append( event_where.append(
sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value, sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,
@ -1171,12 +1159,26 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)) ))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
apply = True apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY: elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(json_condition( event_where.append(json_condition(
"main", "$properties", 'response_body', op, f.value, e_k_f "main", "$properties", 'response_body', op, f.value, e_k_f
)) ))
events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"].append(event_where[-1])
apply = True apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
else: else:
logging.warning(f"undefined FETCH filter: {f.type}") logging.warning(f"undefined FETCH filter: {f.type}")
if not apply: if not apply:
@ -1220,35 +1222,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
else: else:
logging.warning(f"undefined GRAPHQL filter: {f.type}") logging.warning(f"undefined GRAPHQL filter: {f.type}")
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
elif event_type == schemas.EventType.EVENT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.CLICK.column
event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
events_conditions.append({"type": event_where[-1], "condition": ""})
else: else:
continue continue
if event.properties is not None and len(event.properties.filters) > 0:
sub_conditions = []
for l, property in enumerate(event.properties.filters):
a_k = f"{e_k}_att_{l}"
full_args = {**full_args,
**sh.multi_values(property.value, value_key=a_k)}
if property.is_predefined:
condition = get_sub_condition(col_name=f"main.{property.name}",
val_name=a_k, operator=property.operator)
else:
condition = get_sub_condition(col_name=f"main.properties.{property.name}",
val_name=a_k, operator=property.operator)
event_where.append(
sh.multi_conditions(condition, property.value, value_key=a_k)
)
sub_conditions.append(event_where[-1])
if len(sub_conditions) > 0:
sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
if event_index == 0 or or_events: if event_index == 0 or or_events:
event_where += ss_constraints event_where += ss_constraints
if is_not: if is_not:
@ -1451,17 +1426,30 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if extra_conditions and len(extra_conditions) > 0: if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = [] _extra_or_condition = []
for i, c in enumerate(extra_conditions): for i, c in enumerate(extra_conditions):
if sh.isAny_opreator(c.operator): if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
continue continue
e_k = f"ec_value{i}" e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator) op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator) c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args, full_args = {**full_args,
**sh.multi_values(c.value, value_key=e_k)} **sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type: if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
_extra_or_condition.append( _extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s", sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k)) c.value, value_key=e_k))
elif c.type == schemas.EventType.REQUEST_DETAILS.value:
for j, c_f in enumerate(c.filters):
if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
continue
e_k += f"_{j}"
op = sh.get_sql_operator(c_f.operator)
c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
full_args = {**full_args,
**sh.multi_values(c_f.value, value_key=e_k)}
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
_extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c_f.value, value_key=e_k))
else: else:
logging.warning(f"unsupported extra_event type:${c.type}") logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0: if len(_extra_or_condition) > 0:

View file

@ -143,12 +143,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events: for e in data.events:
if e.type == schemas.EventType.LOCATION: if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions: if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({ extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type, "type": e.type,
"isEvent": True, "isEvent": True,
"value": [], "value": [],
"operator": e.operator, "operator": e.operator,
"filters": [] "filters": e.filters
}) })
for v in e.value: for v in e.value:
if v not in extra_conditions[e.operator].value: if v not in extra_conditions[e.operator].value:
@ -160,12 +160,12 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events: for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS: if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions: if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({ extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type, "type": e.type,
"isEvent": True, "isEvent": True,
"value": [], "value": [],
"operator": e.operator, "operator": e.operator,
"filters": [] "filters": e.filters
}) })
for v in e.value: for v in e.value:
if v not in extra_conditions[e.operator].value: if v not in extra_conditions[e.operator].value:
@ -273,7 +273,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions return sessions
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema): def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS, return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \ schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s", sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
c.value, value_key=e_k)) c.value, value_key=e_k))
else: else:
logger.warning(f"unsupported extra_event type:${c.type}") logger.warning(f"unsupported extra_event type: {c.type}")
if len(_extra_or_condition) > 0: if len(_extra_or_condition) > 0:
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")") extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
query_part = f"""\ query_part = f"""\

View file

@ -11,3 +11,9 @@ if smtp.has_smtp():
logger.info("valid SMTP configuration found") logger.info("valid SMTP configuration found")
else: else:
logger.info("no SMTP configuration found or SMTP validation failed") logger.info("no SMTP configuration found or SMTP validation failed")
if config("EXP_CH_DRIVER", cast=bool, default=True):
logging.info(">>> Using new CH driver")
from . import ch_client_exp as ch_client
else:
from . import ch_client

View file

@ -1,185 +1,73 @@
import logging import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty
import clickhouse_connect import clickhouse_driver
from clickhouse_connect.driver.query import QueryContext
from decouple import config from decouple import config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_CH_CONFIG = {"host": config("ch_host"),
"user": config("ch_user", default="default"),
"password": config("ch_password", default=""),
"port": config("ch_port_http", cast=int),
"client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)
settings = {} settings = {}
if config('ch_timeout', cast=int, default=-1) > 0: if config('ch_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s") logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)} settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
if config('ch_receive_timeout', cast=int, default=-1) > 0: if config('ch_receive_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s") logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)} settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
extra_args["compression"] = "lz4"
def transform_result(self, original_function):
@wraps(original_function)
def wrapper(*args, **kwargs):
if kwargs.get("parameters"):
if config("LOCAL_DEV", cast=bool, default=False):
logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
else:
logger.debug(
str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
elif len(args) > 0:
if config("LOCAL_DEV", cast=bool, default=False):
logger.debug(args[0])
else:
logger.debug(str.encode(args[0]))
result = original_function(*args, **kwargs)
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
column_names = result.column_names
result = result.result_rows
result = [dict(zip(column_names, row)) for row in result]
return result
return wrapper
class ClickHouseConnectionPool:
def __init__(self, min_size, max_size):
self.min_size = min_size
self.max_size = max_size
self.pool = Queue()
self.lock = threading.Lock()
self.total_connections = 0
# Initialize the pool with min_size connections
for _ in range(self.min_size):
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.pool.put(client)
self.total_connections += 1
def get_connection(self):
try:
# Try to get a connection without blocking
client = self.pool.get_nowait()
return client
except Empty:
with self.lock:
if self.total_connections < self.max_size:
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.total_connections += 1
return client
# If max_size reached, wait until a connection is available
client = self.pool.get()
return client
def release_connection(self, client):
self.pool.put(client)
def close_all(self):
with self.lock:
while not self.pool.empty():
client = self.pool.get()
client.close()
self.total_connections = 0
CH_pool: ClickHouseConnectionPool = None
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0
def make_pool():
if not config('CH_POOL', cast=bool, default=True):
return
global CH_pool
global RETRY
if CH_pool is not None:
try:
CH_pool.close_all()
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)
try:
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
max_size=config("CH_MAXCONN", cast=int, default=8))
if CH_pool is not None:
logger.info("Connection pool created successfully for CH")
except ConnectionError as error:
logger.error("Error while connecting to CH", exc_info=error)
if RETRY < RETRY_MAX:
RETRY += 1
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
time.sleep(RETRY_INTERVAL)
make_pool()
else:
raise error
class ClickHouseClient: class ClickHouseClient:
__client = None __client = None
def __init__(self, database=None): def __init__(self, database=None):
if self.__client is None: extra_args = {}
if database is not None or not config('CH_POOL', cast=bool, default=True): if config("CH_COMPRESSION", cast=bool, default=True):
self.__client = clickhouse_connect.get_client(**CH_CONFIG, extra_args["compression"] = "lz4"
database=database if database else config("ch_database", self.__client = clickhouse_driver.Client(host=config("ch_host"),
default="default"), database=database if database else config("ch_database",
settings=settings, default="default"),
**extra_args) user=config("ch_user", default="default"),
password=config("ch_password", default=""),
else: port=config("ch_port", cast=int),
self.__client = CH_pool.get_connection() settings=settings,
**extra_args) \
self.__client.execute = transform_result(self, self.__client.query) if self.__client is None else self.__client
self.__client.format = self.format
def __enter__(self): def __enter__(self):
return self
def execute(self, query, parameters=None, **args):
try:
results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
keys = tuple(x for x, y in results[1])
return [dict(zip(keys, i)) for i in results[0]]
except Exception as err:
logger.error("--------- CH EXCEPTION -----------", exc_info=err)
logger.error("--------- CH QUERY EXCEPTION -----------")
logger.error(self.format(query=query, parameters=parameters)
.replace('\n', '\\n')
.replace(' ', ' ')
.replace(' ', ' '))
logger.error("--------------------")
raise err
def insert(self, query, params=None, **args):
return self.__client.execute(query=query, params=params, **args)
def client(self):
return self.__client return self.__client
def format(self, query, parameters=None): def format(self, query, parameters):
if parameters: if parameters is None:
ctx = QueryContext(query=query, parameters=parameters) return query
return ctx.final_query return self.__client.substitute_params(query, parameters, self.__client.connection.context)
return query
def __exit__(self, *args): def __exit__(self, *args):
if config('CH_POOL', cast=bool, default=True): pass
CH_pool.release_connection(self.__client)
else:
self.__client.close()
async def init(): async def init():
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}") logger.info(f">CH_POOL:not defined")
if config('CH_POOL', cast=bool, default=True):
make_pool()
async def terminate(): async def terminate():
global CH_pool pass
if CH_pool is not None:
try:
CH_pool.close_all()
logger.info("Closed all connexions to CH")
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)

View file

@ -0,0 +1,178 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty
import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config
logger = logging.getLogger(__name__)
_CH_CONFIG = {"host": config("ch_host"),
"user": config("ch_user", default="default"),
"password": config("ch_password", default=""),
"port": config("ch_port_http", cast=int),
"client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)
settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
if config('ch_receive_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
extra_args["compression"] = "lz4"
def transform_result(self, original_function):
@wraps(original_function)
def wrapper(*args, **kwargs):
if kwargs.get("parameters"):
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
elif len(args) > 0:
logger.debug(str.encode(args[0]))
result = original_function(*args, **kwargs)
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
column_names = result.column_names
result = result.result_rows
result = [dict(zip(column_names, row)) for row in result]
return result
return wrapper
class ClickHouseConnectionPool:
def __init__(self, min_size, max_size):
self.min_size = min_size
self.max_size = max_size
self.pool = Queue()
self.lock = threading.Lock()
self.total_connections = 0
# Initialize the pool with min_size connections
for _ in range(self.min_size):
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.pool.put(client)
self.total_connections += 1
def get_connection(self):
try:
# Try to get a connection without blocking
client = self.pool.get_nowait()
return client
except Empty:
with self.lock:
if self.total_connections < self.max_size:
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.total_connections += 1
return client
# If max_size reached, wait until a connection is available
client = self.pool.get()
return client
def release_connection(self, client):
self.pool.put(client)
def close_all(self):
with self.lock:
while not self.pool.empty():
client = self.pool.get()
client.close()
self.total_connections = 0
CH_pool: ClickHouseConnectionPool = None
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0
def make_pool():
if not config('CH_POOL', cast=bool, default=True):
return
global CH_pool
global RETRY
if CH_pool is not None:
try:
CH_pool.close_all()
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)
try:
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
max_size=config("CH_MAXCONN", cast=int, default=8))
if CH_pool is not None:
logger.info("Connection pool created successfully for CH")
except ConnectionError as error:
logger.error("Error while connecting to CH", exc_info=error)
if RETRY < RETRY_MAX:
RETRY += 1
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
time.sleep(RETRY_INTERVAL)
make_pool()
else:
raise error
class ClickHouseClient:
__client = None
def __init__(self, database=None):
if self.__client is None:
if database is not None or not config('CH_POOL', cast=bool, default=True):
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
database=database if database else config("ch_database",
default="default"),
settings=settings,
**extra_args)
else:
self.__client = CH_pool.get_connection()
self.__client.execute = transform_result(self, self.__client.query)
self.__client.format = self.format
def __enter__(self):
return self.__client
def format(self, query, parameters=None):
if parameters:
ctx = QueryContext(query=query, parameters=parameters)
return ctx.final_query
return query
def __exit__(self, *args):
if config('CH_POOL', cast=bool, default=True):
CH_pool.release_connection(self.__client)
else:
self.__client.close()
async def init():
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
if config('CH_POOL', cast=bool, default=True):
make_pool()
async def terminate():
global CH_pool
if CH_pool is not None:
try:
CH_pool.close_all()
logger.info("Closed all connexions to CH")
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)


@ -1,10 +1,7 @@
import logging
import re
from typing import Union from typing import Union
import schemas import schemas
from chalicelib.utils import sql_helper as sh import logging
from schemas import SearchEventOperator
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -69,94 +66,3 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
if event_type not in defs: if event_type not in defs:
raise Exception(f"unsupported EventType:{event_type}") raise Exception(f"unsupported EventType:{event_type}")
return defs.get(event_type) return defs.get(event_type)
# AI generated
def simplify_clickhouse_type(ch_type: str) -> str:
"""
Simplify a ClickHouse data type name to a broader category like:
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
"""
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
while True:
match = pattern_wrappers.match(ch_type)
if match:
ch_type = match.group(2)
else:
break
# 2) Normalize (lowercase) for easier checks
normalized_type = ch_type.lower()
# 3) Use pattern matching or direct checks for known categories
# (You can adapt this as you see fit for your environment.)
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
return "int"
# Floats: Float32, Float64
if re.match(r'^float(32|64)$', normalized_type):
return "float"
# Decimal: Decimal(P, S)
if normalized_type.startswith("decimal"):
return "decimal"
# Date/DateTime
if normalized_type.startswith("date"):
return "datetime"
if normalized_type.startswith("datetime"):
return "datetime"
# Strings: String, FixedString(N)
if normalized_type.startswith("string"):
return "string"
if normalized_type.startswith("fixedstring"):
return "string"
# UUID
if normalized_type.startswith("uuid"):
return "uuid"
# Enums: Enum8(...) or Enum16(...)
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
return "enum"
# Arrays: Array(T)
if normalized_type.startswith("array"):
return "array"
# Tuples: Tuple(T1, T2, ...)
if normalized_type.startswith("tuple"):
return "tuple"
# Map(K, V)
if normalized_type.startswith("map"):
return "map"
# Nested(...)
if normalized_type.startswith("nested"):
return "nested"
# If we didn't match above, just return the original type in lowercase
return normalized_type
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
"""
Takes a list of ClickHouse types and returns a list of simplified types
by calling `simplify_clickhouse_type` on each.
"""
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
def get_sub_condition(col_name: str, val_name: str,
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]):
if operator == SearchEventOperator.PATTERN:
return f"match({col_name}, %({val_name})s)"
op = sh.get_sql_operator(operator)
return f"{col_name} {op} %({val_name})s"


@ -4,40 +4,41 @@ import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]): def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
if isinstance(op, Enum):
op = op.value
return { return {
schemas.SearchEventOperator.IS: "=", schemas.SearchEventOperator.IS.value: "=",
schemas.SearchEventOperator.ON: "=", schemas.SearchEventOperator.ON.value: "=",
schemas.SearchEventOperator.ON_ANY: "IN", schemas.SearchEventOperator.ON_ANY.value: "IN",
schemas.SearchEventOperator.IS_NOT: "!=", schemas.SearchEventOperator.IS_NOT.value: "!=",
schemas.SearchEventOperator.NOT_ON: "!=", schemas.SearchEventOperator.NOT_ON.value: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE", schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE", schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE", schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE", schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
# this is not used as an operator, it is used in order to maintain a valid value for conditions
schemas.SearchEventOperator.PATTERN: "regex",
# Selector operators: # Selector operators:
schemas.ClickEventExtraOperator.IS: "=", schemas.ClickEventExtraOperator.IS.value: "=",
schemas.ClickEventExtraOperator.IS_NOT: "!=", schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE", schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE", schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE", schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE", schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
schemas.MathOperator.GREATER: ">", schemas.MathOperator.GREATER.value: ">",
schemas.MathOperator.GREATER_EQ: ">=", schemas.MathOperator.GREATER_EQ.value: ">=",
schemas.MathOperator.LESS: "<", schemas.MathOperator.LESS.value: "<",
schemas.MathOperator.LESS_EQ: "<=", schemas.MathOperator.LESS_EQ.value: "<=",
}.get(op, "=") }.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator): def is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.IS_NOT, if isinstance(op, Enum):
schemas.SearchEventOperator.NOT_ON, op = op.value
schemas.SearchEventOperator.NOT_CONTAINS, return op in [schemas.SearchEventOperator.IS_NOT.value,
schemas.ClickEventExtraOperator.IS_NOT, schemas.SearchEventOperator.NOT_ON.value,
schemas.ClickEventExtraOperator.NOT_CONTAINS] schemas.SearchEventOperator.NOT_CONTAINS.value,
schemas.ClickEventExtraOperator.IS_NOT.value,
schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
def reverse_sql_operator(op): def reverse_sql_operator(op):
@ -75,3 +76,4 @@ def single_value(values):
if isinstance(v, Enum): if isinstance(v, Enum):
values[i] = v.value values[i] = v.value
return values return values
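
Both variants of get_sql_operator shown above resolve to the same SQL tokens; a small illustrative check, using the import paths that appear elsewhere in this changeset:

    from chalicelib.utils import sql_helper as sh
    import schemas

    sh.get_sql_operator(schemas.SearchEventOperator.CONTAINS)    # -> "ILIKE"
    sh.get_sql_operator(schemas.MathOperator.GREATER_EQ)         # -> ">="
    sh.is_negation_operator(schemas.SearchEventOperator.IS_NOT)  # -> True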


@ -75,4 +75,3 @@ EXP_AUTOCOMPLETE=true
EXP_ALERTS=true EXP_ALERTS=true
EXP_ERRORS_SEARCH=true EXP_ERRORS_SEARCH=true
EXP_METRICS=true EXP_METRICS=true
EXP_SESSIONS_SEARCH=true


@ -1,15 +1,16 @@
urllib3==2.3.0 urllib3==2.3.0
requests==2.32.3 requests==2.32.3
boto3==1.37.21 boto3==1.36.12
pyjwt==2.10.1 pyjwt==2.10.1
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.6 psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15 clickhouse-connect==0.8.15
elasticsearch==8.17.2 elasticsearch==8.17.1
jira==3.8.0 jira==3.8.0
cachetools==5.5.2 cachetools==5.5.1
fastapi==0.115.12 fastapi==0.115.8
uvicorn[standard]==0.34.0 uvicorn[standard]==0.34.0
python-decouple==3.8 python-decouple==3.8
pydantic[email]==2.10.6 pydantic[email]==2.10.6


@ -1,15 +1,16 @@
urllib3==2.3.0 urllib3==2.3.0
requests==2.32.3 requests==2.32.3
boto3==1.37.21 boto3==1.36.12
pyjwt==2.10.1 pyjwt==2.10.1
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.6 psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15 clickhouse-connect==0.8.15
elasticsearch==8.17.2 elasticsearch==8.17.1
jira==3.8.0 jira==3.8.0
cachetools==5.5.2 cachetools==5.5.1
fastapi==0.115.12 fastapi==0.115.8
uvicorn[standard]==0.34.0 uvicorn[standard]==0.34.0
python-decouple==3.8 python-decouple==3.8
pydantic[email]==2.10.6 pydantic[email]==2.10.6


@ -1,55 +0,0 @@
from typing import Annotated
from fastapi import Body, Depends, Query
import schemas
from chalicelib.core import metadata
from chalicelib.core.product_analytics import events, properties
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/filters', tags=["product_analytics"])
def get_all_filters(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": {
"events": events.get_events(project_id=projectId, page=filter_query),
"filters": properties.get_all_properties(project_id=projectId, page=filter_query),
"metadata": metadata.get_for_filters(project_id=projectId)
}
}
@app.get('/{projectId}/events/names', tags=["product_analytics"])
def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.get_events(project_id=projectId, page=filter_query)}
@app.get('/{projectId}/properties/search', tags=["product_analytics"])
def get_event_properties(projectId: int, event_name: str = None,
context: schemas.CurrentContext = Depends(OR_context)):
if not event_name or len(event_name) == 0:
return {"data": []}
return {"data": properties.get_event_properties(project_id=projectId, event_name=event_name)}
@app.post('/{projectId}/events/search', tags=["product_analytics"])
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.search_events(project_id=projectId, data=data)}
@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}
@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}


@ -0,0 +1,15 @@
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
# data: schemas.CreateDashboardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return product_anaytics2.search_events(project_id=projectId, data={})


@ -1,12 +1,10 @@
from typing import Annotated from fastapi import Body, Depends
from fastapi import Body, Depends, Query
import schemas
from chalicelib.core.usability_testing import service from chalicelib.core.usability_testing import service
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
from or_dependencies import OR_context from or_dependencies import OR_context
from routers.base import get_routers from routers.base import get_routers
from schemas import schemas
public_app, app, app_apikey = get_routers() public_app, app, app_apikey = get_routers()
tags = ["usability-tests"] tags = ["usability-tests"]
@ -79,8 +77,9 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate
@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags) @app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.PaginatedSchema, Query()], async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
live: bool = False, user_id: str = None): live: bool = False,
user_id: str = None):
""" """
Get sessions related to a specific UT test. Get sessions related to a specific UT test.
@ -89,21 +88,20 @@ async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[sch
""" """
if live: if live:
return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit) return service.ut_tests_sessions_live(projectId, test_id, page, limit)
else: else:
return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit, user_id, live) return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags) @app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
async def get_responses(projectId: int, test_id: int, task_id: int, async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
""" """
Get responses related to a specific UT test. Get responses related to a specific UT test.
- **project_id**: The unique identifier of the project. - **project_id**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test. - **test_id**: The unique identifier of the UT test.
""" """
return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query) return service.get_responses(test_id, task_id, page, limit, query)
@app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags) @app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)


@ -1,4 +1,2 @@
from .schemas import * from .schemas import *
from .product_analytics import *
from . import overrides as _overrides from . import overrides as _overrides
from .schemas import _PaginatedSchema as PaginatedSchema


@ -1,22 +0,0 @@
from typing import Optional, List, Literal, Union, Annotated
from pydantic import Field
from .overrides import BaseModel
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
_PaginatedSchema, PropertyFilterSchema
class EventSearchSchema(BaseModel):
is_event: Literal[True] = True
name: str = Field(...)
properties: Optional[EventPropertiesSchema] = Field(default=None)
ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
Field(discriminator='is_event')]
class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
filters: List[ProductAnalyticsGroupedFilter] = Field(...)
sort: str = Field(default="startTs")
order: SortOrderType = Field(default=SortOrderType.DESC)


@ -404,7 +404,6 @@ class EventType(str, Enum):
REQUEST_MOBILE = "requestMobile" REQUEST_MOBILE = "requestMobile"
ERROR_MOBILE = "errorMobile" ERROR_MOBILE = "errorMobile"
SWIPE_MOBILE = "swipeMobile" SWIPE_MOBILE = "swipeMobile"
EVENT = "event"
class PerformanceEventType(str, Enum): class PerformanceEventType(str, Enum):
@ -465,7 +464,6 @@ class SearchEventOperator(str, Enum):
NOT_CONTAINS = "notContains" NOT_CONTAINS = "notContains"
STARTS_WITH = "startsWith" STARTS_WITH = "startsWith"
ENDS_WITH = "endsWith" ENDS_WITH = "endsWith"
PATTERN = "regex"
class ClickEventExtraOperator(str, Enum): class ClickEventExtraOperator(str, Enum):
@ -547,66 +545,7 @@ class RequestGraphqlFilterSchema(BaseModel):
return values return values
class EventPredefinedPropertyType(str, Enum): class SessionSearchEventSchema2(BaseModel):
TIME = "$time"
SOURCE = "$source"
DURATION_S = "$duration_s"
DESCRIPTION = "description"
AUTO_CAPTURED = "$auto_captured"
SDK_EDITION = "$sdk_edition"
SDK_VERSION = "$sdk_version"
DEVICE_ID = "$device_id"
OS = "$os"
OS_VERSION = "$os_version"
BROWSER = "$browser"
BROWSER_VERSION = "$browser_version"
DEVICE = "$device"
SCREEN_HEIGHT = "$screen_height"
SCREEN_WIDTH = "$screen_width"
CURRENT_URL = "$current_url"
INITIAL_REFERRER = "$initial_referrer"
REFERRING_DOMAIN = "$referring_domain"
REFERRER = "$referrer"
INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
SEARCH_ENGINE = "$search_engine"
SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
UTM_SOURCE = "utm_source"
UTM_MEDIUM = "utm_medium"
UTM_CAMPAIGN = "utm_campaign"
COUNTRY = "$country"
STATE = "$state"
CITY = "$city"
ISSUE_TYPE = "issue_type"
TAGS = "$tags"
IMPORT = "$import"
class PropertyFilterSchema(BaseModel):
is_event: Literal[False] = False
name: Union[EventPredefinedPropertyType, str] = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
value: List[Union[int, str]] = Field(...)
# property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
@computed_field
@property
def is_predefined(self) -> bool:
return EventPredefinedPropertyType.has_value(self.name)
@model_validator(mode="after")
def transform_name(self):
if isinstance(self.name, Enum):
self.name = self.name.value
return self
class EventPropertiesSchema(BaseModel):
operator: Literal["and", "or"] = Field(...)
filters: List[PropertyFilterSchema] = Field(...)
class SessionSearchEventSchema(BaseModel):
is_event: Literal[True] = True is_event: Literal[True] = True
value: List[Union[str, int]] = Field(...) value: List[Union[str, int]] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...) type: Union[EventType, PerformanceEventType] = Field(...)
@ -614,7 +553,6 @@ class SessionSearchEventSchema(BaseModel):
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None) source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
sourceOperator: Optional[MathOperator] = Field(default=None) sourceOperator: Optional[MathOperator] = Field(default=None)
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list) filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
properties: Optional[EventPropertiesSchema] = Field(default=None)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_single_to_list_values = field_validator('value', mode='before')(single_to_list) _single_to_list_values = field_validator('value', mode='before')(single_to_list)
@ -722,12 +660,12 @@ def add_missing_is_event(values: dict):
# this type is created to allow mixing events&filters and specifying a discriminator # this type is created to allow mixing events&filters and specifying a discriminator
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema], GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)] Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]
class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema): class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True) events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
filters: List[GroupedFilterType] = Field(default_factory=list) filters: List[GroupedFilterType] = Field(default_factory=list)
sort: str = Field(default="startTs") sort: str = Field(default="startTs")
order: SortOrderType = Field(default=SortOrderType.DESC) order: SortOrderType = Field(default=SortOrderType.DESC)
@ -752,8 +690,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
def add_missing_attributes(cls, values): def add_missing_attributes(cls, values):
# in case isEvent is wrong: # in case isEvent is wrong:
for f in values.get("filters") or []: for f in values.get("filters") or []:
if f.get("type") is None:
continue
if EventType.has_value(f["type"]) and not f.get("isEvent"): if EventType.has_value(f["type"]) and not f.get("isEvent"):
f["isEvent"] = True f["isEvent"] = True
elif FilterType.has_value(f["type"]) and f.get("isEvent"): elif FilterType.has_value(f["type"]) and f.get("isEvent"):
@ -779,15 +715,6 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
f["value"] = vals f["value"] = vals
return values return values
@model_validator(mode="after")
def check_pa_event_filter(self):
for v in self.filters + self.events:
if v.type == EventType.EVENT:
assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
assert len(v.value) == 1, "value must have 1 single value for EVENT type"
return self
@model_validator(mode="after") @model_validator(mode="after")
def split_filters_events(self): def split_filters_events(self):
n_filters = [] n_filters = []
@ -1033,36 +960,6 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
return self return self
# We don't need this as the UI is expecting filters to override the full series' filters
# @model_validator(mode="after")
# def __merge_out_filters_with_series(self):
# for f in self.filters:
# for s in self.series:
# found = False
#
# if f.is_event:
# sub = s.filter.events
# else:
# sub = s.filter.filters
#
# for e in sub:
# if f.type == e.type and f.operator == e.operator:
# found = True
# if f.is_event:
# # If extra event: append value
# for v in f.value:
# if v not in e.value:
# e.value.append(v)
# else:
# # If extra filter: override value
# e.value = f.value
# if not found:
# sub.append(f)
#
# self.filters = []
#
# return self
# UI is expecting filters to override the full series' filters # UI is expecting filters to override the full series' filters
@model_validator(mode="after") @model_validator(mode="after")
def __override_series_filters_with_outer_filters(self): def __override_series_filters_with_outer_filters(self):
@ -1133,6 +1030,16 @@ class CardTable(__CardSchema):
values["metricValue"] = [] values["metricValue"] = []
return values return values
@model_validator(mode="after")
def __enforce_AND_operator(self):
self.metric_of = MetricOfTable(self.metric_of)
if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
for s in self.series:
if s.filter is not None:
s.filter.events_order = SearchEventOrder.AND
return self
@model_validator(mode="after") @model_validator(mode="after")
def __transform(self): def __transform(self):
self.metric_of = MetricOfTable(self.metric_of) self.metric_of = MetricOfTable(self.metric_of)
@ -1477,7 +1384,7 @@ class MetricSearchSchema(_PaginatedSchema):
mine_only: bool = Field(default=False) mine_only: bool = Field(default=False)
class _HeatMapSearchEventRaw(SessionSearchEventSchema): class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
type: Literal[EventType.LOCATION] = Field(...) type: Literal[EventType.LOCATION] = Field(...)
@ -1602,30 +1509,3 @@ class TagCreate(TagUpdate):
class ScopeSchema(BaseModel): class ScopeSchema(BaseModel):
scope: int = Field(default=1, ge=1, le=2) scope: int = Field(default=1, ge=1, le=2)
class SessionModel(BaseModel):
duration: int
errorsCount: int
eventsCount: int
favorite: bool = Field(default=False)
issueScore: int
issueTypes: List[IssueType] = Field(default=[])
metadata: dict = Field(default={})
pagesCount: int
platform: str
projectId: int
sessionId: str
startTs: int
timezone: Optional[str]
userAnonymousId: Optional[str]
userBrowser: str
userCity: str
userCountry: str
userDevice: Optional[str]
userDeviceType: str
userId: Optional[str]
userOs: str
userState: str
userUuid: str
viewed: bool = Field(default=False)


@ -19,15 +19,16 @@ const EVENTS_DEFINITION = {
} }
}; };
EVENTS_DEFINITION.emit = { EVENTS_DEFINITION.emit = {
NEW_AGENT: "NEW_AGENT", NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT", NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED", AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED", AGENTS_CONNECTED: "AGENTS_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED", AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED", NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED", SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT, SESSION_RECONNECTED: "SESSION_RECONNECTED",
WEBRTC_CONFIG: "WEBRTC_CONFIG", UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
WEBRTC_CONFIG: "WEBRTC_CONFIG",
}; };
const BASE_sessionInfo = { const BASE_sessionInfo = {


@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
}; };
async function getRoomData(io, roomID) { async function getRoomData(io, roomID) {
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null; let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
const connected_sockets = await io.in(roomID).fetchSockets(); const connected_sockets = await io.in(roomID).fetchSockets();
if (connected_sockets.length > 0) { if (connected_sockets.length > 0) {
for (let socket of connected_sockets) { for (let socket of connected_sockets) {
@ -52,6 +52,7 @@ async function getRoomData(io, roomID) {
} else { } else {
agentsCount++; agentsCount++;
agentIDs.push(socket.id); agentIDs.push(socket.id);
agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
if (socket.handshake.query.config !== undefined) { if (socket.handshake.query.config !== undefined) {
config = socket.handshake.query.config; config = socket.handshake.query.config;
} }
@ -60,8 +61,10 @@ async function getRoomData(io, roomID) {
} else { } else {
tabsCount = -1; tabsCount = -1;
agentsCount = -1; agentsCount = -1;
agentInfos = [];
agentIDs = [];
} }
return {tabsCount, agentsCount, tabIDs, agentIDs, config}; return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
} }
function processNewSocket(socket) { function processNewSocket(socket) {
@ -81,7 +84,7 @@ async function onConnect(socket) {
IncreaseOnlineConnections(socket.handshake.query.identity); IncreaseOnlineConnections(socket.handshake.query.identity);
const io = getServer(); const io = getServer();
const {tabsCount, agentsCount, tabIDs, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId); const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);
if (socket.handshake.query.identity === IDENTITIES.session) { if (socket.handshake.query.identity === IDENTITIES.session) {
// Check if session with the same tabID already connected, if so, refuse new connexion // Check if session with the same tabID already connected, if so, refuse new connexion
@ -105,6 +108,7 @@ async function onConnect(socket) {
logger.debug(`notifying new session about agent-existence`); logger.debug(`notifying new session about agent-existence`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config); io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs); io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id); socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
} }
} else if (tabsCount <= 0) { } else if (tabsCount <= 0) {


@ -2,11 +2,12 @@ package datasaver
import ( import (
"context" "context"
"encoding/json"
"openreplay/backend/pkg/db/types"
"openreplay/backend/internal/config/db" "openreplay/backend/internal/config/db"
"openreplay/backend/pkg/db/clickhouse" "openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/logger" "openreplay/backend/pkg/logger"
. "openreplay/backend/pkg/messages" . "openreplay/backend/pkg/messages"
queue "openreplay/backend/pkg/queue/types" queue "openreplay/backend/pkg/queue/types"
@ -50,10 +51,6 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
} }
func (s *saverImpl) Handle(msg Message) { func (s *saverImpl) Handle(msg Message) {
if msg.TypeID() == MsgCustomEvent {
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
}
var ( var (
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID()) sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
session *sessions.Session session *sessions.Session
@ -69,6 +66,23 @@ func (s *saverImpl) Handle(msg Message) {
return return
} }
if msg.TypeID() == MsgCustomEvent {
m := msg.(*CustomEvent)
// Try to parse custom event payload to JSON and extract or_payload field
type CustomEventPayload struct {
CustomTimestamp uint64 `json:"or_timestamp"`
}
customPayload := &CustomEventPayload{}
if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
if customPayload.CustomTimestamp >= session.Timestamp {
s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
msg.Meta().Timestamp = customPayload.CustomTimestamp
s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
}
}
defer s.Handle(types.WrapCustomEvent(m))
}
if IsMobileType(msg.TypeID()) { if IsMobileType(msg.TypeID()) {
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil { if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
if !postgres.IsPkeyViolation(err) { if !postgres.IsPkeyViolation(err) {


@ -111,12 +111,12 @@ var batches = map[string]string{
"pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)", "issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
"mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)", "mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
"mobile_custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, "mobile_custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
@ -309,7 +309,6 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
session.UserOSVersion, session.UserOSVersion,
"mouse_thrashing", "mouse_thrashing",
issueID, issueID,
cropString(msg.Url),
jsonString, jsonString,
); err != nil { ); err != nil {
c.checkError("issuesEvents", err) c.checkError("issuesEvents", err)
@ -366,7 +365,6 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
session.UserOSVersion, session.UserOSVersion,
msg.Type, msg.Type,
issueID, issueID,
cropString(msg.Url),
jsonString, jsonString,
); err != nil { ); err != nil {
c.checkError("issuesEvents", err) c.checkError("issuesEvents", err)
@ -554,7 +552,6 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
session.Platform, session.Platform,
session.UserOSVersion, session.UserOSVersion,
msgID, msgID,
cropString(msg.Url),
jsonString, jsonString,
); err != nil { ); err != nil {
c.checkError("errors", err) c.checkError("errors", err)


@ -84,10 +84,7 @@ func (p *poolImpl) Begin() (*Tx, error) {
tx, err := p.conn.Begin(context.Background()) tx, err := p.conn.Begin(context.Background())
p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "") p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "")
p.metrics.IncreaseTotalRequests("begin", "") p.metrics.IncreaseTotalRequests("begin", "")
return &Tx{ return &Tx{tx, p.metrics}, err
origTx: tx,
metrics: p.metrics,
}, err
} }
func (p *poolImpl) Close() { func (p *poolImpl) Close() {
@ -97,13 +94,13 @@ func (p *poolImpl) Close() {
// TX - start // TX - start
type Tx struct { type Tx struct {
origTx pgx.Tx pgx.Tx
metrics database.Database metrics database.Database
} }
func (tx *Tx) TxExec(sql string, args ...interface{}) error { func (tx *Tx) TxExec(sql string, args ...interface{}) error {
start := time.Now() start := time.Now()
_, err := tx.origTx.Exec(context.Background(), sql, args...) _, err := tx.Exec(context.Background(), sql, args...)
method, table := methodName(sql) method, table := methodName(sql)
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
tx.metrics.IncreaseTotalRequests(method, table) tx.metrics.IncreaseTotalRequests(method, table)
@ -112,7 +109,7 @@ func (tx *Tx) TxExec(sql string, args ...interface{}) error {
func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row { func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
start := time.Now() start := time.Now()
res := tx.origTx.QueryRow(context.Background(), sql, args...) res := tx.QueryRow(context.Background(), sql, args...)
method, table := methodName(sql) method, table := methodName(sql)
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table) tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
tx.metrics.IncreaseTotalRequests(method, table) tx.metrics.IncreaseTotalRequests(method, table)
@ -121,7 +118,7 @@ func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
func (tx *Tx) TxRollback() error { func (tx *Tx) TxRollback() error {
start := time.Now() start := time.Now()
err := tx.origTx.Rollback(context.Background()) err := tx.Rollback(context.Background())
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "") tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "")
tx.metrics.IncreaseTotalRequests("rollback", "") tx.metrics.IncreaseTotalRequests("rollback", "")
return err return err
@ -129,7 +126,7 @@ func (tx *Tx) TxRollback() error {
func (tx *Tx) TxCommit() error { func (tx *Tx) TxCommit() error {
start := time.Now() start := time.Now()
err := tx.origTx.Commit(context.Background()) err := tx.Commit(context.Background())
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "") tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "")
tx.metrics.IncreaseTotalRequests("commit", "") tx.metrics.IncreaseTotalRequests("commit", "")
return err return err


@ -5,11 +5,10 @@ import (
"encoding/hex" "encoding/hex"
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/google/uuid"
"hash/fnv" "hash/fnv"
"strconv" "strconv"
"github.com/google/uuid"
. "openreplay/backend/pkg/messages" . "openreplay/backend/pkg/messages"
) )
@ -24,7 +23,41 @@ type ErrorEvent struct {
Payload string Payload string
Tags map[string]*string Tags map[string]*string
OriginType int OriginType int
Url string }
func unquote(s string) string {
if s[0] == '"' {
return s[1 : len(s)-1]
}
return s
}
func parseTags(tagsJSON string) (tags map[string]*string, err error) {
if len(tagsJSON) == 0 {
return nil, fmt.Errorf("empty tags")
}
if tagsJSON[0] == '[' {
var tagsArr []json.RawMessage
if err = json.Unmarshal([]byte(tagsJSON), &tagsArr); err != nil {
return
}
tags = make(map[string]*string)
for _, keyBts := range tagsArr {
tags[unquote(string(keyBts))] = nil
}
} else if tagsJSON[0] == '{' {
var tagsObj map[string]json.RawMessage
if err = json.Unmarshal([]byte(tagsJSON), &tagsObj); err != nil {
return
}
tags = make(map[string]*string)
for key, valBts := range tagsObj {
val := unquote(string(valBts))
tags[key] = &val
}
}
return
} }
func WrapJSException(m *JSException) (*ErrorEvent, error) { func WrapJSException(m *JSException) (*ErrorEvent, error) {
@ -36,7 +69,6 @@ func WrapJSException(m *JSException) (*ErrorEvent, error) {
Message: m.Message, Message: m.Message,
Payload: m.Payload, Payload: m.Payload,
OriginType: m.TypeID(), OriginType: m.TypeID(),
Url: m.Url,
}, nil }, nil
} }
@ -49,7 +81,6 @@ func WrapIntegrationEvent(m *IntegrationEvent) *ErrorEvent {
Message: m.Message, Message: m.Message,
Payload: m.Payload, Payload: m.Payload,
OriginType: m.TypeID(), OriginType: m.TypeID(),
Url: m.Url,
} }
} }


@ -135,6 +135,11 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add tracker version to context // Add tracker version to context
r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion)) r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Handler's logic // Handler's logic
if req.ProjectKey == nil { if req.ProjectKey == nil {
@ -157,13 +162,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add projectID to context // Add projectID to context
r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID))) r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))
// Validate tracker version
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Check if the project supports mobile sessions // Check if the project supports mobile sessions
if !p.IsWeb() { if !p.IsWeb() {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize) e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)


@ -29,7 +29,7 @@ type Task struct {
Duration int Duration int
Status string Status string
Path string Path string
tx *pool.Tx tx pool.Tx
} }
func (t *Task) HasToTrim() bool { func (t *Task) HasToTrim() bool {
@ -65,7 +65,7 @@ func (t *tasksImpl) Get() (task *Task, err error) {
} }
}() }()
task = &Task{tx: tx} task = &Task{tx: pool.Tx{Tx: tx}}
sql := `SELECT spot_id, crop, duration FROM spots.tasks WHERE status = 'pending' ORDER BY added_time FOR UPDATE SKIP LOCKED LIMIT 1` sql := `SELECT spot_id, crop, duration FROM spots.tasks WHERE status = 'pending' ORDER BY added_time FOR UPDATE SKIP LOCKED LIMIT 1`
err = tx.TxQueryRow(sql).Scan(&task.SpotID, &task.Crop, &task.Duration) err = tx.TxQueryRow(sql).Scan(&task.SpotID, &task.Crop, &task.Duration)
if err != nil { if err != nil {


@ -52,7 +52,6 @@ func NewTranscoder(cfg *spot.Config, log logger.Logger, objStorage objectstorage
tasks: NewTasks(conn), tasks: NewTasks(conn),
streams: NewStreams(log, conn, objStorage), streams: NewStreams(log, conn, objStorage),
spots: spots, spots: spots,
metrics: metrics,
} }
tnsc.prepareWorkers = workers.NewPool(2, 4, tnsc.prepare) tnsc.prepareWorkers = workers.NewPool(2, 4, tnsc.prepare)
tnsc.transcodeWorkers = workers.NewPool(2, 4, tnsc.transcode) tnsc.transcodeWorkers = workers.NewPool(2, 4, tnsc.transcode)

ee/api/.gitignore (vendored): 6 changed lines

@ -223,14 +223,10 @@ Pipfile.lock
/chalicelib/core/sessions/performance_event.py /chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py /chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py /chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules /chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py /chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps /chalicelib/core/sourcemaps
/chalicelib/core/tags.py /chalicelib/core/tags.py
/chalicelib/core/product_analytics
/chalicelib/saml /chalicelib/saml
/chalicelib/utils/__init__.py /chalicelib/utils/__init__.py
/chalicelib/utils/args_transformer.py /chalicelib/utils/args_transformer.py
@ -293,5 +289,3 @@ Pipfile.lock
/chalicelib/core/errors/errors_ch.py /chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_details.py /chalicelib/core/errors/errors_details.py
/chalicelib/utils/contextual_validators.py /chalicelib/utils/contextual_validators.py
/routers/subs/product_analytics.py
/schemas/product_analytics.py


@ -6,23 +6,25 @@ name = "pypi"
[packages] [packages]
urllib3 = "==2.3.0" urllib3 = "==2.3.0"
requests = "==2.32.3" requests = "==2.32.3"
boto3 = "==1.37.21" boto3 = "==1.36.12"
pyjwt = "==2.10.1" pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10" psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"} psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.15" clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.2" elasticsearch = "==8.17.1"
jira = "==3.8.0" jira = "==3.8.0"
cachetools = "==5.5.2" cachetools = "==5.5.1"
fastapi = "==0.115.12" fastapi = "==0.115.8"
uvicorn = {extras = ["standard"], version = "==0.34.0"} uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0" gunicorn = "==23.0.0"
python-decouple = "==3.8" python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"} pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0" apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20" python-multipart = "==0.0.20"
redis = "==5.2.1" redis = "==5.2.1"
azure-storage-blob = "==12.25.0" azure-storage-blob = "==12.24.1"
[dev-packages] [dev-packages]


@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
from crons import core_crons, ee_crons, core_dynamic_crons from crons import core_crons, ee_crons, core_dynamic_crons
from routers import core, core_dynamic from routers import core, core_dynamic
from routers import ee from routers import ee
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import v1_api_ee from routers.subs import v1_api_ee
if config("ENABLE_SSO", cast=bool, default=True): if config("ENABLE_SSO", cast=bool, default=True):
@ -150,9 +150,9 @@ app.include_router(spot.public_app)
app.include_router(spot.app) app.include_router(spot.app)
app.include_router(spot.app_apikey) app.include_router(spot.app_apikey)
app.include_router(product_analytics.public_app, prefix="/ap") app.include_router(product_anaytics.public_app)
app.include_router(product_analytics.app, prefix="/ap") app.include_router(product_anaytics.app)
app.include_router(product_analytics.app_apikey, prefix="/ap") app.include_router(product_anaytics.app_apikey)
if config("ENABLE_SSO", cast=bool, default=True): if config("ENABLE_SSO", cast=bool, default=True):
app.include_router(saml.public_app) app.include_router(saml.public_app)


@ -86,7 +86,8 @@ def __generic_query(typename, value_length=None):
ORDER BY value""" ORDER BY value"""
if value_length is None or value_length > 2: if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type return f"""SELECT DISTINCT ON(value, type) value, type
FROM ((SELECT DISTINCT value, type
FROM {TABLE} FROM {TABLE}
WHERE WHERE
project_id = %(project_id)s project_id = %(project_id)s
@ -102,7 +103,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}' AND type='{typename.upper()}'
AND value ILIKE %(value)s AND value ILIKE %(value)s
ORDER BY value ORDER BY value
LIMIT 5);""" LIMIT 5)) AS raw;"""
return f"""SELECT DISTINCT value, type return f"""SELECT DISTINCT value, type
FROM {TABLE} FROM {TABLE}
WHERE WHERE
@ -257,7 +258,7 @@ def __search_metadata(project_id, value, key=None, source=None):
WHERE project_id = %(project_id)s WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""") AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with ch_client.ClickHouseClient() as cur: with ch_client.ClickHouseClient() as cur:
query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value), LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}) "svalue": helper.string_to_sql_like("^" + value)})


@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data):
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False) ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False)
ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s") ch_basic_query.append("error_id = %(error_id)s")
with ch_client.ClickHouseClient() as ch: with ch_client.ClickHouseClient() as ch:
data["startDate24"] = TimeUTC.now(-1) data["startDate24"] = TimeUTC.now(-1)
@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data):
"error_id": error_id} "error_id": error_id}
main_ch_query = f"""\ main_ch_query = f"""\
WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id, WITH pre_processed AS (SELECT error_id,
toString(`$properties`.name) AS name, toString(`$properties`.name) AS name,
toString(`$properties`.message) AS message, toString(`$properties`.message) AS message,
session_id, session_id,
@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR' AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000) AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000) AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000)
AND toString(`$properties`.error_id) = %(error_id)s AND error_id = %(error_id)s
GROUP BY timestamp GROUP BY timestamp
ORDER BY timestamp) AS chart_details ORDER BY timestamp) AS chart_details
) AS chart_details24 ON TRUE ) AS chart_details24 ON TRUE
@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR' AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000) AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000) AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000)
AND toString(`$properties`.error_id) = %(error_id)s AND error_id = %(error_id)s
GROUP BY timestamp GROUP BY timestamp
ORDER BY timestamp) AS chart_details ORDER BY timestamp) AS chart_details
) AS chart_details30 ON TRUE;""" ) AS chart_details30 ON TRUE;"""
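The functional difference in this file is how the error identifier is addressed: one side compares a plain error_id column, the other extracts it from the JSON `$properties` field with toString(`$properties`.error_id). A small sketch of assembling that constraint list either way; apart from the last entry, the fragments are illustrative:

    # Returns the WHERE-clause fragments used around the error-details
    # queries. Only the last entry differs between the two sides of the diff;
    # the first two are generic placeholders.
    def error_constraints(read_from_json_properties: bool) -> list:
        constraints = [
            "project_id = %(project_id)s",
            "`$event_name` = 'ERROR'",
        ]
        if read_from_json_properties:
            # JSON-typed column: pull error_id out of $properties and cast it.
            constraints.append("toString(`$properties`.error_id) = %(error_id)s")
        else:
            # Dedicated column: compare directly.
            constraints.append("error_id = %(error_id)s")
        return constraints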


@ -0,0 +1,17 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search as sessions_search_legacy
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental sessions search")
from . import sessions_ch as sessions
from . import sessions_search_exp as sessions_search
else:
from . import sessions_pg as sessions
from . import sessions_search as sessions_search
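This new __init__.py picks the sessions implementation once, at import time, from the EXP_SESSIONS_SEARCH flag, so callers only ever import the facade names. A caller-side sketch; the argument names follow the search_sessions signatures visible elsewhere in this compare, while the wrapper itself is illustrative:

    # Callers depend on the facade names only; which backend they get is
    # decided by EXP_SESSIONS_SEARCH when the package is first imported.
    from chalicelib.core.sessions import sessions, sessions_search


    def run_search(project_id, payload, user_id):
        # Resolves to the experimental ClickHouse-backed search when the flag
        # is set, otherwise to the legacy PostgreSQL implementation.
        return sessions_search.search_sessions(data=payload,
                                               project_id=project_id,
                                               user_id=user_id)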


@ -1,5 +1,6 @@
import ast import ast
import logging import logging
from typing import List, Union
import schemas import schemas
from chalicelib.core import events, metadata, projects from chalicelib.core import events, metadata, projects
@ -218,7 +219,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
} }
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema): def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS, return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \ schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,


@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
ORDER BY sort_key {data.order} ORDER BY sort_key {data.order}
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""", LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
parameters=full_args) parameters=full_args)
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
try: try:
logging.debug("--------------------")
sessions_list = cur.execute(main_query) sessions_list = cur.execute(main_query)
logging.debug("--------------------")
except Exception as err: except Exception as err:
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------") logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
logging.warning(main_query) logging.warning(main_query)
@ -262,7 +262,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
FROM public.user_favorite_sessions FROM public.user_favorite_sessions
WHERE user_favorite_sessions.user_id = %(userId)s WHERE user_favorite_sessions.user_id = %(userId)s
) AS favorite_sessions USING (session_id) ) AS favorite_sessions USING (session_id)
WHERE s.project_id = %(id)s AND isNotNull(s.duration) AND s.{col_name} = %(value)s WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
) AS full_sessions ) AS full_sessions
ORDER BY favorite DESC, issue_score DESC ORDER BY favorite DESC, issue_score DESC
LIMIT 10 LIMIT 10
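Beyond the isNotNull(s.duration) versus duration IS NOT NULL phrasing, the visible change here is where the debug logging sits relative to cur.execute(). The surrounding pattern, restated as a small helper; the cursor mirrors the cur.format()/cur.execute() calls in the diff, everything else is illustrative:

    import logging

    logger = logging.getLogger(__name__)


    def execute_search(cur, query: str, parameters: dict):
        # cur.format() interpolates parameters into the SQL; cur.execute()
        # runs the statement against ClickHouse.
        main_query = cur.format(query=query, parameters=parameters)
        logger.debug("--------------------")
        logger.debug(main_query)
        logger.debug("--------------------")
        try:
            return cur.execute(main_query)
        except Exception:
            # The failing query is re-logged at warning level before
            # propagating to the caller.
            logger.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
            logger.warning(main_query)
            raise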


@ -927,12 +927,12 @@ def authenticate_sso(email: str, internal_id: str):
aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti), aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti),
"refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int), "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
"spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], "spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE), iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE, for_spot=True),
"spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'], "spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
tenant_id=r['tenantId'], tenant_id=r['tenantId'],
iat=j_r.spot_jwt_refresh_iat, iat=j_r.spot_jwt_refresh_iat,
aud=spot.AUDIENCE, aud=spot.AUDIENCE,
jwt_jti=j_r.spot_jwt_refresh_jti), jwt_jti=j_r.spot_jwt_refresh_jti, for_spot=True),
"spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int) "spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int)
} }
return response return response
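The SSO change threads a for_spot flag through both Spot token helpers. A sketch of the Spot part of the response bundle; the keyword arguments follow the diff, while the wrapper function, its parameters, and the jwt_payload object are illustrative stand-ins for r and j_r:

    # Assembles the Spot token fields returned at the end of
    # authenticate_sso(), with the for_spot flag shown on one side of the
    # diff. authorizers is the project's helper module, passed in here to
    # avoid assuming its import path.
    from decouple import config


    def build_spot_tokens(authorizers, spot_audience, user_id, tenant_id, jwt_payload):
        return {
            "spotJwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id,
                                                iat=jwt_payload.spot_jwt_iat,
                                                aud=spot_audience, for_spot=True),
            "spotRefreshToken": authorizers.generate_jwt_refresh(
                user_id=user_id, tenant_id=tenant_id,
                iat=jwt_payload.spot_jwt_refresh_iat,
                aud=spot_audience,
                jwt_jti=jwt_payload.spot_jwt_refresh_jti, for_spot=True),
            "spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int),
        }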


@ -44,15 +44,11 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/user_testing.py rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/core/product_analytics
rm -rf ./chalicelib/saml rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py rm -rf ./chalicelib/utils/args_transformer.py
@ -113,5 +109,3 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
rm -rf ./chalicelib/core/errors/errors_ch.py rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_details.py rm -rf ./chalicelib/core/errors/errors_details.py
rm -rf ./chalicelib/utils/contextual_validators.py rm -rf ./chalicelib/utils/contextual_validators.py
rm -rf ./routers/subs/product_analytics.py
rm -rf ./schemas/product_analytics.py


@ -1,18 +1,19 @@
urllib3==2.3.0 urllib3==2.3.0
requests==2.32.3 requests==2.32.3
boto3==1.37.21 boto3==1.36.12
pyjwt==2.10.1 pyjwt==2.10.1
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.6 psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15 clickhouse-connect==0.8.15
elasticsearch==8.17.2 elasticsearch==8.17.1
jira==3.8.0 jira==3.8.0
cachetools==5.5.2 cachetools==5.5.1
fastapi==0.115.12 fastapi==0.115.8
uvicorn[standard]==0.34.0 uvicorn[standard]==0.34.0
python-decouple==3.8 python-decouple==3.8
pydantic[email]==2.10.6 pydantic[email]==2.10.6
apscheduler==3.11.0 apscheduler==3.11.0
azure-storage-blob==12.25.0 azure-storage-blob==12.24.1


@ -1,18 +1,19 @@
urllib3==2.3.0 urllib3==2.3.0
requests==2.32.3 requests==2.32.3
boto3==1.37.21 boto3==1.36.12
pyjwt==2.10.1 pyjwt==2.10.1
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.6 psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15 clickhouse-connect==0.8.15
elasticsearch==8.17.2 elasticsearch==8.17.1
jira==3.8.0 jira==3.8.0
cachetools==5.5.2 cachetools==5.5.1
fastapi==0.115.12 fastapi==0.115.8
python-decouple==3.8 python-decouple==3.8
pydantic[email]==2.10.6 pydantic[email]==2.10.6
apscheduler==3.11.0 apscheduler==3.11.0
redis==5.2.1 redis==5.2.1
azure-storage-blob==12.25.0 azure-storage-blob==12.24.1


@ -1,15 +1,16 @@
urllib3==2.3.0 urllib3==2.3.0
requests==2.32.3 requests==2.32.3
boto3==1.37.21 boto3==1.36.12
pyjwt==2.10.1 pyjwt==2.10.1
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.6 psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15 clickhouse-connect==0.8.15
elasticsearch==8.17.2 elasticsearch==8.17.1
jira==3.8.0 jira==3.8.0
cachetools==5.5.2 cachetools==5.5.1
fastapi==0.115.12 fastapi==0.115.8
uvicorn[standard]==0.34.0 uvicorn[standard]==0.34.0
gunicorn==23.0.0 gunicorn==23.0.0
python-decouple==3.8 python-decouple==3.8
@ -18,9 +19,10 @@ apscheduler==3.11.0
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
python3-saml==1.16.0
--no-binary=lxml
python-multipart==0.0.20 python-multipart==0.0.20
redis==5.2.1 redis==5.2.1
#confluent-kafka==2.1.0 #confluent-kafka==2.1.0
azure-storage-blob==12.25.0 azure-storage-blob==12.24.1


@ -1,5 +1,4 @@
from .schemas import * from .schemas import *
from .schemas_ee import * from .schemas_ee import *
from .assist_stats_schema import * from .assist_stats_schema import *
from .product_analytics import *
from . import overrides as _overrides from . import overrides as _overrides


@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
from . import schemas from . import schemas
from .overrides import BaseModel, Enum from .overrides import BaseModel, Enum, ORUnion
from .transformers_validators import remove_whitespace from .transformers_validators import remove_whitespace
@ -91,6 +91,33 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
return values return values
class SessionModel(BaseModel):
duration: int
errorsCount: int
eventsCount: int
favorite: bool = Field(default=False)
issueScore: int
issueTypes: List[schemas.IssueType] = Field(default=[])
metadata: dict = Field(default={})
pagesCount: int
platform: str
projectId: int
sessionId: str
startTs: int
timezone: Optional[str]
userAnonymousId: Optional[str]
userBrowser: str
userCity: str
userCountry: str
userDevice: Optional[str]
userDeviceType: str
userId: Optional[str]
userOs: str
userState: str
userUuid: str
viewed: bool = Field(default=False)
class AssistRecordUpdatePayloadSchema(BaseModel): class AssistRecordUpdatePayloadSchema(BaseModel):
name: str = Field(..., min_length=1) name: str = Field(..., min_length=1)
_transform_name = field_validator('name', mode="before")(remove_whitespace) _transform_name = field_validator('name', mode="before")(remove_whitespace)


@ -121,16 +121,7 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {
// For the ender service only // For the ender service only
func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) { func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
rows, err := s.db.Query(` rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
SELECT
session_id,
CASE
WHEN duration IS NULL OR duration < 0 THEN 0
ELSE duration
END,
start_ts
FROM sessions
WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
if err != nil { if err != nil {
return nil, err return nil, err
} }
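The two versions of GetMany differ only in how duration is normalised: COALESCE(duration, 0) handles NULL, while the CASE form also clamps negative values to zero. The service itself is Go; for consistency with the other examples in this section, the same statement is sketched below as a constant with psycopg-style placeholders (an adaptation, since the Go code uses $1 with pq.Array):

    # The CASE form treats both NULL and negative durations as 0; COALESCE
    # alone only covers the NULL case. Table and column names as in the diff.
    GET_MANY_SESSIONS_SQL = """
        SELECT session_id,
               CASE
                   WHEN duration IS NULL OR duration < 0 THEN 0
                   ELSE duration
               END AS duration,
               start_ts
        FROM sessions
        WHERE session_id = ANY(%(session_ids)s)
    """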


@ -1,3 +1,16 @@
SELECT 1
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
SELECT 1
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
ALTER TABLE experimental.sessions
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee'; CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
SET allow_experimental_json_type = 1; SET allow_experimental_json_type = 1;
@ -151,8 +164,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id) ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _timestamp + INTERVAL 1 MONTH , TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested, -- The list of events that should not be ingested,
-- according to a specific event_name and optional properties -- according to a specific event_name and optional properties
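The guard statements at the top of this migration make it fail fast: throwIf() raises inside ClickHouse if any 'ios' or 'android' sessions remain, so the platform enum is only narrowed on clean data. A sketch of driving those guards from Python before the ALTER; the client usage mirrors the ch_client pattern seen in the backend diffs, and the import path is an assumption:

    # Runs the pre-flight guards from the migration above, then the enum
    # change. ClickHouseClient usage mirrors the
    # `with ch_client.ClickHouseClient() as cur` pattern in the backend
    # diffs; the module path is assumed.
    from chalicelib.utils import ch_client

    GUARDS = [
        "SELECT 1 FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found') "
        "FROM experimental.sessions) AS raw LIMIT 1;",
        "SELECT 1 FROM (SELECT throwIf(platform = 'android', 'Android sessions found') "
        "FROM experimental.sessions) AS raw LIMIT 1;",
    ]

    ALTER_PLATFORM = ("ALTER TABLE experimental.sessions "
                      "MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';")


    def migrate_platform_enum():
        with ch_client.ClickHouseClient() as ch:
            for guard in GUARDS:
                ch.execute(guard)   # raises if the guarded condition holds
            ch.execute(ALTER_PLATFORM)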


@ -1,168 +0,0 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
DROP TABLE IF EXISTS product_analytics.all_events;
CREATE TABLE IF NOT EXISTS product_analytics.all_events
(
project_id UInt16,
auto_captured BOOL DEFAULT FALSE,
event_name String,
display_name String DEFAULT '',
description String DEFAULT '',
event_count_l30days UInt32 DEFAULT 0,
query_count_l30days UInt32 DEFAULT 0,
created_at DateTime64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, auto_captured, event_name);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
TO product_analytics.all_events AS
SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
`$auto_captured` AS auto_captured,
`$event_name` AS event_name,
display_name,
description
FROM product_analytics.events
LEFT JOIN (SELECT project_id,
auto_captured,
event_name,
display_name,
description
FROM product_analytics.all_events
WHERE all_events.display_name != ''
OR all_events.description != '') AS old_data
ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
events.`$event_name` = old_data.event_name);
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
value_type String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name, value_type);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;
DROP TABLE IF EXISTS product_analytics.all_properties;
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
project_id UInt16,
property_name String,
is_event_property BOOL,
display_name String DEFAULT '',
description String DEFAULT '',
status String DEFAULT 'visible' COMMENT 'visible/hidden/dropped',
data_count UInt32 DEFAULT 1,
query_count UInt32 DEFAULT 0,
created_at DateTime64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
(
project_id UInt16,
property_name String,
is_event_property BOOL,
value String,
_timestamp DateTime DEFAULT now()
)
ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`$properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name
UNION ALL
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name;
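A note on the extractor views in this file: the DISTINCT ON ... LEFT JOIN old_data shape exists so that rows re-derived from product_analytics.events keep any manually set display_name or description already stored in all_events. The same merge logic, restated as a plain-Python sketch:

    # What the all_events_extractor_mv LEFT JOIN does, restated in Python:
    # newly derived (project_id, auto_captured, event_name) rows inherit any
    # manually set display_name/description that already exists in all_events.
    def merge_event_metadata(derived_keys, existing_metadata):
        """derived_keys: iterable of (project_id, auto_captured, event_name).
        existing_metadata: dict mapping those keys to (display_name, description)."""
        merged = []
        for key in derived_keys:
            display_name, description = existing_metadata.get(key, ("", ""))
            merged.append((*key, display_name, description))
        return merged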


@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee'; CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
CREATE DATABASE IF NOT EXISTS experimental; CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete CREATE TABLE IF NOT EXISTS experimental.autocomplete
@ -9,8 +9,7 @@ CREATE TABLE IF NOT EXISTS experimental.autocomplete
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp) PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value) ORDER BY (project_id, type, value);
TTL _timestamp + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.events CREATE TABLE IF NOT EXISTS experimental.events
( (
@ -87,8 +86,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime) PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id) ORDER BY (project_id, datetime, event_type, session_id, message_id);
TTL datetime + INTERVAL 1 MONTH;
@ -108,7 +106,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126), user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 
'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String), user_city LowCardinality(String),
user_state LowCardinality(String), user_state LowCardinality(String),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web', platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
datetime DateTime, datetime DateTime,
timezone LowCardinality(Nullable(String)), timezone LowCardinality(Nullable(String)),
duration UInt32, duration UInt32,
@ -140,7 +138,6 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime) PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id) ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512; SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
@ -152,8 +149,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
sign Int8 sign Int8
) ENGINE = CollapsingMergeTree(sign) ) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(_timestamp) PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id) ORDER BY (project_id, user_id, session_id);
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
( (
@ -163,8 +159,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp) PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id) ORDER BY (project_id, user_id, session_id);
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
( (
@ -174,8 +169,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp) PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id) ORDER BY (project_id, user_id, error_id);
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.issues CREATE TABLE IF NOT EXISTS experimental.issues
( (
@ -188,8 +182,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp) PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type) ORDER BY (project_id, issue_id, type);
TTL _timestamp + INTERVAL 1 MONTH;
@ -292,8 +285,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions_feature_flags
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime) PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id) ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id);
TTL datetime + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.ios_events CREATE TABLE IF NOT EXISTS experimental.ios_events
( (
@ -329,8 +321,7 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime) PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id) ORDER BY (project_id, datetime, event_type, session_id, message_id);
TTL datetime + INTERVAL 1 MONTH;
SET allow_experimental_json_type = 1; SET allow_experimental_json_type = 1;
@ -484,8 +475,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id) ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _timestamp + INTERVAL 1 MONTH , TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested, -- The list of events that should not be ingested,
-- according to a specific event_name and optional properties -- according to a specific event_name and optional properties
@ -639,11 +629,9 @@ CREATE TABLE IF NOT EXISTS product_analytics.group_properties
-- The full list of events -- The full list of events
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.all_events CREATE TABLE IF NOT EXISTS product_analytics.all_events
( (
project_id UInt16, project_id UInt16,
auto_captured BOOL DEFAULT FALSE,
event_name String, event_name String,
display_name String DEFAULT '', display_name String DEFAULT '',
description String DEFAULT '', description String DEFAULT '',
@ -653,68 +641,10 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
created_at DateTime64, created_at DateTime64,
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, auto_captured, event_name); ORDER BY (project_id, event_name);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill all_events using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
TO product_analytics.all_events AS
SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
`$auto_captured` AS auto_captured,
`$event_name` AS event_name,
display_name,
description
FROM product_analytics.events
LEFT JOIN (SELECT project_id,
auto_captured,
event_name,
display_name,
description
FROM product_analytics.all_events
WHERE all_events.display_name != ''
OR all_events.description != '') AS old_data
ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
events.`$event_name` = old_data.event_name);
-- -------- END ---------
-- The full list of event-properties (used to tell which property belongs to which event)
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
value_type String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name, value_type);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill event_properties using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;
-- Incremental materialized view to fill event_properties using properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;
-- -------- END ---------
-- The full list of properties (events and users) -- The full list of properties (events and users)
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.all_properties CREATE TABLE IF NOT EXISTS product_analytics.all_properties
( (
project_id UInt16, project_id UInt16,
@ -730,95 +660,3 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_properties
_timestamp DateTime DEFAULT now() _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp) ) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property); ORDER BY (project_id, property_name, is_event_property);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill all_properties using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
-- Incremental materialized view to fill all_properties using properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
-- -------- END ---------
-- Some random examples of property-values, limited by 2 per property
-- Experimental: This table is filled by a refreshable materialized view
CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
(
project_id UInt16,
property_name String,
is_event_property BOOL,
value String,
_timestamp DateTime DEFAULT now()
)
ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
-- Incremental materialized view to get random examples of property values using $properties & properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`$properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name
UNION ALL
-- using union because each table should be the target of 1 single refreshable MV
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name;
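The recurring change across this schema file is the retention clause: one side keeps a TTL on each table, the other drops it, and for product_analytics.events the question is whether the soft-delete rule stands alone or is combined with a one-month retention TTL. The events variants, isolated below; column names are taken from the DDL above, and the helper is only illustrative:

    # Isolates the TTL difference for product_analytics.events. ClickHouse
    # accepts several comma-separated TTL expressions in one clause; the
    # soft-delete rule uses the DELETE WHERE form on the _deleted_at marker.
    SOFT_DELETE_TTL = ("_deleted_at + INTERVAL 1 DAY DELETE "
                       "WHERE _deleted_at != '1970-01-01 00:00:00'")
    RETENTION_TTL = "_timestamp + INTERVAL 1 MONTH"


    def events_ttl_clause(with_retention: bool) -> str:
        parts = ([RETENTION_TTL, SOFT_DELETE_TTL] if with_retention
                 else [SOFT_DELETE_TTL])
        return "TTL " + " , ".join(parts)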


@ -1,30 +0,0 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif


@ -1,4 +1,4 @@
\set or_version 'v1.23.0-ee' \set or_version 'v1.22.0-ee'
SET client_min_messages TO NOTICE; SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true \set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1 SELECT EXISTS (SELECT 1


@ -1,3 +0,0 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
DROP TABLE IF EXISTS product_analytics.event_properties;


@ -1,27 +0,0 @@
\set previous_version 'v1.23.0-ee'
\set next_version 'v1.22.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif


@ -1,5 +1,4 @@
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater'; import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import React, { Suspense, lazy } from 'react'; import React, { Suspense, lazy } from 'react';
import { Redirect, Route, Switch } from 'react-router-dom'; import { Redirect, Route, Switch } from 'react-router-dom';
import { observer } from 'mobx-react-lite'; import { observer } from 'mobx-react-lite';
@ -10,7 +9,7 @@ import { Loader } from 'UI';
import APIClient from './api_client'; import APIClient from './api_client';
import * as routes from './routes'; import * as routes from './routes';
import { debounce } from '@/utils'; import { debounceCall } from '@/utils';
const components: any = { const components: any = {
SessionPure: lazy(() => import('Components/Session/Session')), SessionPure: lazy(() => import('Components/Session/Session')),
@ -88,7 +87,6 @@ const ASSIST_PATH = routes.assist();
const LIVE_SESSION_PATH = routes.liveSession(); const LIVE_SESSION_PATH = routes.liveSession();
const MULTIVIEW_PATH = routes.multiview(); const MULTIVIEW_PATH = routes.multiview();
const MULTIVIEW_INDEX_PATH = routes.multiviewIndex(); const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
const ASSIST_STATS_PATH = routes.assistStats();
const USABILITY_TESTING_PATH = routes.usabilityTesting(); const USABILITY_TESTING_PATH = routes.usabilityTesting();
const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit(); const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit();
@ -99,7 +97,6 @@ const SPOT_PATH = routes.spot();
const SCOPE_SETUP = routes.scopeSetup(); const SCOPE_SETUP = routes.scopeSetup();
const HIGHLIGHTS_PATH = routes.highlights(); const HIGHLIGHTS_PATH = routes.highlights();
let debounceSearch: any = () => {};
function PrivateRoutes() { function PrivateRoutes() {
const { projectsStore, userStore, integrationsStore, searchStore } = useStore(); const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
@ -124,13 +121,9 @@ function PrivateRoutes() {
} }
}, [siteId]); }, [siteId]);
React.useEffect(() => {
debounceSearch = debounce(() => searchStore.fetchSessions(), 250);
}, []);
React.useEffect(() => { React.useEffect(() => {
if (!searchStore.urlParsed) return; if (!searchStore.urlParsed) return;
debounceSearch(); debounceCall(() => searchStore.fetchSessions(true), 250)()
}, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]); }, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
return ( return (


@ -16,10 +16,10 @@ function ProfilerDoc() {
? sites.find((site) => site.id === siteId)?.projectKey ? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import { tracker } from '@openreplay/tracker'; const usage = `import OpenReplay from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler'; import trackerProfiler from '@openreplay/tracker-profiler';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()
@ -29,12 +29,10 @@ export const profiler = tracker.use(trackerProfiler());
const fn = profiler('call_name')(() => { const fn = profiler('call_name')(() => {
//... //...
}, thisArg); // thisArg is optional`; }, thisArg); // thisArg is optional`;
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs'; const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerProfiler from '@openreplay/tracker-profiler/cjs'; import trackerProfiler from '@openreplay/tracker-profiler/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
//... //...


@ -7,19 +7,17 @@ import { useTranslation } from 'react-i18next';
function AssistNpm(props) { function AssistNpm(props) {
const { t } = useTranslation(); const { t } = useTranslation();
const usage = `import { tracker } from '@openreplay/tracker'; const usage = `import OpenReplay from '@openreplay/tracker';
import trackerAssist from '@openreplay/tracker-assist'; import trackerAssist from '@openreplay/tracker-assist';
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${props.projectKey}', projectKey: '${props.projectKey}',
}); });
tracker.start() tracker.start()
tracker.use(trackerAssist(options)); // check the list of available options below`; tracker.use(trackerAssist(options)); // check the list of available options below`;
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs'; const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerAssist from '@openreplay/tracker-assist/cjs'; import trackerAssist from '@openreplay/tracker-assist/cjs';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}' projectKey: '${props.projectKey}'
}); });
const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below


@ -14,20 +14,19 @@ function GraphQLDoc() {
const projectKey = siteId const projectKey = siteId
? sites.find((site) => site.id === siteId)?.projectKey ? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import { tracker } from '@openreplay/tracker'; const usage = `import OpenReplay from '@openreplay/tracker';
import trackerGraphQL from '@openreplay/tracker-graphql'; import trackerGraphQL from '@openreplay/tracker-graphql';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()
//... //...
export const recordGraphQL = tracker.use(trackerGraphQL());`; export const recordGraphQL = tracker.use(trackerGraphQL());`;
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs'; const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerGraphQL from '@openreplay/tracker-graphql/cjs'; import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
//... //...


@ -15,21 +15,20 @@ function MobxDoc() {
? sites.find((site) => site.id === siteId)?.projectKey ? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey; : sites[0]?.projectKey;
const mobxUsage = `import { tracker } from '@openreplay/tracker'; const mobxUsage = `import OpenReplay from '@openreplay/tracker';
import trackerMobX from '@openreplay/tracker-mobx'; import trackerMobX from '@openreplay/tracker-mobx';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.use(trackerMobX(<options>)); // check list of available options below tracker.use(trackerMobX(<options>)); // check list of available options below
tracker.start(); tracker.start();
`; `;
const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs'; const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerMobX from '@openreplay/tracker-mobx/cjs'; import trackerMobX from '@openreplay/tracker-mobx/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.use(trackerMobX(<options>)); // check list of available options below tracker.use(trackerMobX(<options>)); // check list of available options below


@ -16,10 +16,10 @@ function NgRxDoc() {
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import { StoreModule } from '@ngrx/store'; const usage = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers'; import { reducers } from './reducers';
import { tracker } from '@openreplay/tracker'; import OpenReplay from '@openreplay/tracker';
import trackerNgRx from '@openreplay/tracker-ngrx'; import trackerNgRx from '@openreplay/tracker-ngrx';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()
@ -32,11 +32,10 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
export class AppModule {}`; export class AppModule {}`;
const usageCjs = `import { StoreModule } from '@ngrx/store'; const usageCjs = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers'; import { reducers } from './reducers';
import { tracker } from '@openreplay/tracker/cjs'; import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerNgRx from '@openreplay/tracker-ngrx/cjs'; import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
//... //...


@ -17,10 +17,10 @@ function PiniaDoc() {
? sites.find((site) => site.id === siteId)?.projectKey ? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import Vuex from 'vuex' const usage = `import Vuex from 'vuex'
import { tracker } from '@openreplay/tracker'; import OpenReplay from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex'; import trackerVuex from '@openreplay/tracker-vuex';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()


@ -16,10 +16,10 @@ function ReduxDoc() {
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import { applyMiddleware, createStore } from 'redux'; const usage = `import { applyMiddleware, createStore } from 'redux';
import { tracker } from '@openreplay/tracker'; import OpenReplay from '@openreplay/tracker';
import trackerRedux from '@openreplay/tracker-redux'; import trackerRedux from '@openreplay/tracker-redux';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()
@ -29,11 +29,10 @@ const store = createStore(
applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
);`; );`;
const usageCjs = `import { applyMiddleware, createStore } from 'redux'; const usageCjs = `import { applyMiddleware, createStore } from 'redux';
import { tracker } from '@openreplay/tracker/cjs'; import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerRedux from '@openreplay/tracker-redux/cjs'; import trackerRedux from '@openreplay/tracker-redux/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
//... //...


@ -16,10 +16,10 @@ function VueDoc() {
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import Vuex from 'vuex' const usage = `import Vuex from 'vuex'
import { tracker } from '@openreplay/tracker'; import OpenReplay from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex'; import trackerVuex from '@openreplay/tracker-vuex';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
tracker.start() tracker.start()
@ -29,11 +29,10 @@ const store = new Vuex.Store({
plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
});`; });`;
const usageCjs = `import Vuex from 'vuex' const usageCjs = `import Vuex from 'vuex'
import { tracker } from '@openreplay/tracker/cjs'; import OpenReplay from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerVuex from '@openreplay/tracker-vuex/cjs'; import trackerVuex from '@openreplay/tracker-vuex/cjs';
//... //...
tracker.configure({ const tracker = new OpenReplay({
projectKey: '${projectKey}' projectKey: '${projectKey}'
}); });
//... //...


@ -16,10 +16,11 @@ function ZustandDoc(props) {
: sites[0]?.projectKey; : sites[0]?.projectKey;
const usage = `import create from "zustand"; const usage = `import create from "zustand";
import { tracker } from '@openreplay/tracker'; import Tracker from '@openreplay/tracker';
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand'; import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';
tracker.configure({
const tracker = new Tracker({
projectKey: ${projectKey}, projectKey: ${projectKey},
}); });
@ -42,12 +43,11 @@ const useBearStore = create(
) )
`; `;
const usageCjs = `import create from "zustand"; const usageCjs = `import create from "zustand";
import { tracker } from '@openreplay/tracker/cjs'; import Tracker from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs'; import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';
tracker.configure({ const tracker = new Tracker({
projectKey: ${projectKey}, projectKey: ${projectKey},
}); });


@ -3,7 +3,6 @@ import withPageTitle from 'HOCs/withPageTitle';
import { PageTitle } from 'UI'; import { PageTitle } from 'UI';
import { observer } from 'mobx-react-lite'; import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore'; import { useStore } from 'App/mstore';
import LanguageSwitcher from "App/components/LanguageSwitcher";
import Settings from './Settings'; import Settings from './Settings';
import ChangePassword from './ChangePassword'; import ChangePassword from './ChangePassword';
import styles from './profileSettings.module.css'; import styles from './profileSettings.module.css';
@ -21,90 +20,107 @@ function ProfileSettings() {
return ( return (
<div className="bg-white rounded-lg border shadow-sm p-5"> <div className="bg-white rounded-lg border shadow-sm p-5">
<PageTitle title={<div>{t('Account')}</div>} /> <PageTitle title={<div>{t('Account')}</div>} />
<Section <div className="flex items-center">
title={t('Profile')} <div className={styles.left}>
description={t('Your email address is your identity on OpenReplay and is used to login.')} <h4 className="text-lg mb-4">{t('Profile')}</h4>
children={<Settings />} <div className={styles.info}>
/> {t(
'Your email address is your identity on OpenReplay and is used to login.',
)}
</div>
</div>
<div>
<Settings />
</div>
</div>
<div className="border-b my-10" /> <div className="border-b my-10" />
{account.hasPassword && ( {account.hasPassword && (
<> <>
<Section <div className="flex items-center">
title={t('Change Password')} <div className={styles.left}>
description={t('Updating your password from time to time enhaces your accounts security')} <h4 className="text-lg mb-4">{t('Change Password')}</h4>
children={<ChangePassword />} <div className={styles.info}>
/> {t('Updating your password from time to time enhances your accounts security.')}
</div>
</div>
<div>
<ChangePassword />
</div>
</div>
<div className="border-b my-10" /> <div className="border-b my-10" />
</> </>
)} )}
<Section <div className="flex items-center">
title={t('Interface Language')} <div className={styles.left}>
description={t('Select the language in which OpenReplay will appear.')} <h4 className="text-lg mb-4">{t('Organization API Key')}</h4>
children={<LanguageSwitcher />} <div className={styles.info}>
/> {t('Your API key gives you access to an extra set of services.')}
</div>
<Section </div>
title={t('Organization API Key')} <div>
description={t('Your API key gives you access to an extra set of services.')} <Api />
children={<Api />} </div>
/> </div>
{isEnterprise && (account.admin || account.superAdmin) && ( {isEnterprise && (account.admin || account.superAdmin) && (
<> <>
<div className="border-b my-10" /> <div className="border-b my-10" />
<Section <div className="flex items-center">
title={t('Tenant Key')} <div className={styles.left}>
description={t('For SSO (SAML) authentication.')} <h4 className="text-lg mb-4">{t('Tenant Key')}</h4>
children={<TenantKey />} <div className={styles.info}>
/> {t('For SSO (SAML) authentication.')}
</div>
</div>
<div>
<TenantKey />
</div>
</div>
</> </>
)} )}
{!isEnterprise && ( {!isEnterprise && (
<> <>
<div className="border-b my-10" /> <div className="border-b my-10" />
<Section <div className="flex items-center">
title={t('Data Collection')} <div className={styles.left}>
description={t('Enables you to control how OpenReplay captures data on your organizations usage to improve our product.')} <h4 className="text-lg mb-4">{t('Data Collection')}</h4>
children={<OptOut />} <div className={styles.info}>
/> {t('Enables you to control how OpenReplay captures data on your organizations usage to improve our product.')}
</div>
</div>
<div>
<OptOut />
</div>
</div>
</> </>
)} )}
{account.license && ( {account.license && (
<> <>
<div className="border-b my-10" /> <div className="border-b my-10" />
<Section title={t('License')} description={t('License key and expiration date.')} children={<Licenses />} />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('License')}</h4>
<div className={styles.info}>
{t('License key and expiration date.')}
</div>
</div>
<div>
<Licenses />
</div>
</div>
</> </>
)} )}
</div> </div>
); );
} }
function Section({ title, description, children }: {
title: string;
description: string;
children: React.ReactNode;
}) {
return (
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{title}</h4>
<div className={styles.info}>
{description}
</div>
</div>
<div>
{children}
</div>
</div>
)
}
export default withPageTitle('Account - OpenReplay Preferences')( export default withPageTitle('Account - OpenReplay Preferences')(
observer(ProfileSettings), observer(ProfileSettings),
); );
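One side of the ProfileSettings hunk folds each settings block into a small Section helper (defined near the end of the hunk) rather than repeating the layout markup per block. An illustrative, self-contained sketch of that pattern, with plain utility classes standing in for the CSS module:

import React from 'react';

// Sketch of the Section pattern: title and description on the left, the
// actual control (passed as children) on the right.
function Section({ title, description, children }: {
  title: string;
  description: string;
  children: React.ReactNode;
}) {
  return (
    <div className="flex items-center">
      <div className="w-1/2">
        <h4 className="text-lg mb-4">{title}</h4>
        <div className="text-sm">{description}</div>
      </div>
      <div>{children}</div>
    </div>
  );
}

// Usage mirroring the Profile block above.
export const ProfileBlock = () => (
  <Section
    title="Profile"
    description="Your email address is your identity on OpenReplay and is used to login."
  >
    <button>Edit profile</button>
  </Section>
);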

View file

@ -6,6 +6,7 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying';
import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone'; import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone';
import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility'; import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility';
import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings'; import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings';
import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode';
import DebugLog from './DebugLog'; import DebugLog from './DebugLog';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
@ -35,6 +36,7 @@ function SessionsListingSettings() {
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
<MouseTrailSettings /> <MouseTrailSettings />
<DebugLog /> <DebugLog />
<VirtualModeSettings />
</div> </div>
</div> </div>
</div> </div>

View file

@ -6,6 +6,7 @@ import CardSessionsByList from 'Components/Dashboard/Widgets/CardSessionsByList'
import { useModal } from 'Components/ModalContext'; import { useModal } from 'Components/ModalContext';
import Widget from '@/mstore/types/widget'; import Widget from '@/mstore/types/widget';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { FilterKey } from 'Types/filter/filterType';
interface Props { interface Props {
metric?: any; metric?: any;
@ -35,20 +36,20 @@ function SessionsBy(props: Props) {
...filtersMap[metric.metricOf], ...filtersMap[metric.metricOf],
value: [data.name], value: [data.name],
type: filtersMap[metric.metricOf].key, type: filtersMap[metric.metricOf].key,
filters: filtersMap[metric.metricOf].filters?.map((f: any) => { filters: [],
const {
key,
operatorOptions,
category,
icon,
label,
options,
...cleaned
} = f;
return { ...cleaned, type: f.key, value: [] };
}),
}; };
if (metric.metricOf === FilterKey.FETCH) {
baseFilter.filters = [
{
key: FilterKey.FETCH_URL,
operator: 'is',
value: [data.name],
type: FilterKey.FETCH_URL,
}
];
}
const { const {
key, key,
operatorOptions, operatorOptions,
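In the CardSessionsByList hunk above, one side attaches a single FETCH_URL sub-filter for the clicked row of a "sessions by fetch" table instead of mapping every nested filter from the filters map. A condensed sketch of that construction (buildDrillDownFilter is an illustrative name, not a function in the codebase):

import { FilterKey } from 'Types/filter/filterType';

// Illustrative helper mirroring the drill-down logic in SessionsBy above:
// build the filter for a clicked "sessions by X" table row.
function buildDrillDownFilter(
  metricOf: string,
  rowName: string,
  baseFromMap: { key: string; [k: string]: any },
) {
  const baseFilter: any = {
    ...baseFromMap,
    value: [rowName],
    type: baseFromMap.key,
    filters: [],
  };
  // For fetch-based tables, drill down by the request URL of the clicked row.
  if (metricOf === FilterKey.FETCH) {
    baseFilter.filters = [
      {
        key: FilterKey.FETCH_URL,
        operator: 'is',
        value: [rowName],
        type: FilterKey.FETCH_URL,
      },
    ];
  }
  return baseFilter;
}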

View file

@ -0,0 +1,32 @@
import React from 'react';
import cn from 'classnames';
import { Styles } from '../../common';
import stl from './scale.module.css';
import { useTranslation } from 'react-i18next';
function Scale({ colors }) {
const { t } = useTranslation();
const lastIndex = Styles.compareColors.length - 1;
return (
<div className={cn(stl.bars, 'absolute bottom-0 mb-4')}>
{Styles.compareColors.map((c, i) => (
<div
key={i}
style={{
backgroundColor: c,
width: '6px',
height: '15px',
marginBottom: '1px',
}}
className="flex items-center justify-center"
>
{i === 0 && <div className="text-xs pl-12">{t('Slow')}</div>}
{i === lastIndex && <div className="text-xs pl-12">{t('Fast')}</div>}
</div>
))}
</div>
);
}
export default Scale;
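Scale renders one thin bar per entry of Styles.compareColors and labels only the ends ('Slow' and 'Fast'). A usage sketch matching how the map widget below mounts it (import paths are assumptions based on the surrounding files; note the component currently reads Styles.compareColors internally, so the colors prop is effectively decorative):

import React from 'react';
import { Styles } from 'Components/Dashboard/Widgets/common';
import Scale from './Scale';

// Legend for the speed-index map: the same compare palette that colors the
// countries, rendered as a vertical scale.
export const SpeedLegend = () => <Scale colors={Styles.compareColors} />;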

View file

@ -0,0 +1,55 @@
.maps {
height: auto;
width: 110%;
stroke: $gray-medium;
stroke-width: 1;
stroke-linecap: round;
stroke-linejoin: round;
margin-top: -20px;
}
.location {
fill: $gray-light !important;
cursor: pointer;
stroke: #fff;
&:focus,
&:hover {
fill: #2E3ECC !important;
outline: 0;
}
}
.heat_index0 {
fill: $gray-light !important;
}
.heat_index5 {
fill: #B0B8FF !important;
}
.heat_index4 {
fill: #6171FF !important;
}
.heat_index3 {
fill: #394EFF !important;
}
.heat_index2 {
fill: #2E3ECC !important;
}
.heat_index1 {
fill: #222F99 !important;
}
.tooltip {
position: fixed;
padding: 5px;
border: 1px solid $gray-light;
border-radius: 3px;
background-color: white;
font-size: 12px;
line-height: 1.2;
}

View file

@ -0,0 +1,134 @@
import React from 'react';
import { NoContent } from 'UI';
import { observer } from 'mobx-react-lite';
import { numberWithCommas, positionOfTheNumber } from 'App/utils';
import WorldMap from '@svg-maps/world';
import { SVGMap } from 'react-svg-map';
import cn from 'classnames';
import { NO_METRIC_DATA } from 'App/constants/messages';
import { InfoCircleOutlined } from '@ant-design/icons';
import stl from './SpeedIndexByLocation.module.css';
import Scale from './Scale';
import { Styles, AvgLabel } from '../../common';
import { useTranslation } from 'react-i18next';
interface Props {
data?: any;
}
function SpeedIndexByLocation(props: Props) {
const { t } = useTranslation();
const { data } = props;
const wrapper: any = React.useRef(null);
const [tooltipStyle, setTooltipStyle] = React.useState({ display: 'none' });
const [pointedLocation, setPointedLocation] = React.useState<any>(null);
const dataMap: any = React.useMemo(() => {
const _data: any = {};
const max = data.chart?.reduce(
(acc: any, item: any) => Math.max(acc, item.value),
0,
);
const min = data.chart?.reduce(
(acc: any, item: any) => Math.min(acc, item.value),
0,
);
data.chart?.forEach((item: any) => {
if (!item || !item.userCountry) {
return;
}
item.perNumber = positionOfTheNumber(min, max, item.value, 5);
_data[item.userCountry.toLowerCase()] = item;
});
return _data;
}, [data.chart]);
const getLocationClassName = (location: any) => {
const i = dataMap[location.id] ? dataMap[location.id].perNumber : 0;
const cls = stl[`heat_index${i}`];
return cn(stl.location, cls);
};
const getLocationName = (event: any) => {
if (!event) return null;
const id = event.target.attributes.id.value;
const name = event.target.attributes.name.value;
const percentage = dataMap[id] ? dataMap[id].perNumber : 0;
return { name, id, percentage };
};
const handleLocationMouseOver = (event: any) => {
const pointedLocation = getLocationName(event);
setPointedLocation(pointedLocation);
};
const handleLocationMouseOut = () => {
setTooltipStyle({ display: 'none' });
setPointedLocation(null);
};
const handleLocationMouseMove = (event: any) => {
const tooltipStyle = {
display: 'block',
top: event.clientY + 10,
left: event.clientX - 100,
};
setTooltipStyle(tooltipStyle);
};
return (
<NoContent
size="small"
show={false}
style={{ height: '240px' }}
title={
<div className="flex items-center gap-2 text-base font-normal">
<InfoCircleOutlined size={12} /> {NO_METRIC_DATA}
</div>
}
>
<div className="absolute right-0 mr-4 top=0 w-full flex justify-end">
<AvgLabel text="Avg" count={Math.round(data.value)} unit="ms" />
</div>
<Scale colors={Styles.compareColors} />
<div className="map-target" />
<div
style={{
height: '234px',
width: '100%',
margin: '0 auto',
display: 'flex',
}}
ref={wrapper}
>
<SVGMap
map={WorldMap}
className={stl.maps}
locationClassName={getLocationClassName}
onLocationMouseOver={handleLocationMouseOver}
onLocationMouseOut={handleLocationMouseOut}
onLocationMouseMove={handleLocationMouseMove}
/>
</div>
<div className={stl.tooltip} style={tooltipStyle}>
{pointedLocation && (
<>
<div>{pointedLocation.name}</div>
<div>
{t('Avg:')}{' '}
<strong>
{dataMap[pointedLocation.id]
? numberWithCommas(
parseInt(dataMap[pointedLocation.id].value),
)
: 0}
</strong>
</div>
</>
)}
</div>
</NoContent>
);
}
export default observer(SpeedIndexByLocation);
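In SpeedIndexByLocation above, each country's average is bucketed with positionOfTheNumber(min, max, item.value, 5) and the resulting index selects a .heat_indexN class from the CSS module. The implementation of positionOfTheNumber lives in App/utils and is not shown in this diff; the stand-in below is only a plausible reading of how such a bucketing helper could behave:

// Hypothetical stand-in for App/utils' positionOfTheNumber: project `value`
// from the [min, max] range onto an integer bucket in [1, steps] (0 when the
// range collapses). The real utility may differ in rounding and edge cases.
function positionOfTheNumber(
  min: number,
  max: number,
  value: number,
  steps: number,
): number {
  if (max === min) return 0;
  const ratio = (value - min) / (max - min); // 0..1 across the observed range
  return Math.max(1, Math.min(steps, Math.round(ratio * steps)));
}

// Example: with min 0 and max ~1650 ms, a 415 ms average maps to bucket 1 and
// the largest average maps to bucket 5, i.e. opposite ends of the heat scale.
const bucket = positionOfTheNumber(0, 1650, 415, 5); // -> 1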

View file

@ -0,0 +1 @@
export { default } from './SpeedIndexByLocation';

View file

@ -0,0 +1,11 @@
.bars {
& div:first-child {
border-top-left-radius: 3px;
border-top-right-radius: 3px;
}
& div:last-child {
border-bottom-left-radius: 3px;
border-bottom-right-radius: 3px;
}
}

View file

@ -23,6 +23,7 @@ function BottomButtons({
<Button <Button
loading={loading} loading={loading}
type="primary" type="primary"
htmlType="submit"
disabled={loading || !instance.validate()} disabled={loading || !instance.validate()}
id="submit-button" id="submit-button"
> >
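The htmlType="submit" addition lets the antd Button participate in native form submission: inside an antd Form, clicking it (or pressing Enter in a field) runs validation and calls onFinish, with no explicit onClick needed on the button. A minimal sketch:

import React from 'react';
import { Button, Form, Input } from 'antd';

// With htmlType="submit", submitting the form (button click or Enter in a
// field) triggers validation and then onFinish; the button needs no onClick.
export function SaveForm({ onSave }: { onSave: (values: any) => void }) {
  return (
    <Form layout="vertical" onFinish={onSave}>
      <Form.Item name="name" label="Name" rules={[{ required: true }]}>
        <Input placeholder="Card name" />
      </Form.Item>
      <Button type="primary" htmlType="submit">
        Save
      </Button>
    </Form>
  );
}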

View file

@ -43,7 +43,7 @@ function ClickMapRagePicker() {
<Checkbox onChange={onToggle} label={t('Include rage clicks')} /> <Checkbox onChange={onToggle} label={t('Include rage clicks')} />
<Button size="small" onClick={refreshHeatmapSession}> <Button size="small" onClick={refreshHeatmapSession}>
{t('Get new session')} {t('Get new image')}
</Button> </Button>
</div> </div>
); );

View file

@ -0,0 +1,92 @@
import React from 'react';
import ExCard from 'Components/Dashboard/components/DashboardList/NewDashModal/Examples/ExCard';
import InsightsCard from 'Components/Dashboard/Widgets/CustomMetricsWidgets/InsightsCard';
import { InsightIssue } from 'App/mstore/types/widget';
import SessionsPerBrowser from 'Components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import SpeedIndexByLocation from 'Components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation';
interface Props {
title: string;
type: string;
onCard: (card: string) => void;
}
function SpeedIndexByLocationExample(props: Props) {
const data = {
value: 1480,
chart: [
{
userCountry: 'AT',
value: 415,
},
{
userCountry: 'PL',
value: 433.1666666666667,
},
{
userCountry: 'FR',
value: 502,
},
{
userCountry: 'IT',
value: 540.4117647058823,
},
{
userCountry: 'TH',
value: 662.0,
},
{
userCountry: 'ES',
value: 740.5454545454545,
},
{
userCountry: 'SG',
value: 889.6666666666666,
},
{
userCountry: 'TW',
value: 1008.0,
},
{
userCountry: 'HU',
value: 1027.0,
},
{
userCountry: 'DE',
value: 1054.4583333333333,
},
{
userCountry: 'BE',
value: 1126.0,
},
{
userCountry: 'TR',
value: 1174.0,
},
{
userCountry: 'US',
value: 1273.3015873015872,
},
{
userCountry: 'GB',
value: 1353.8095238095239,
},
{
userCountry: 'VN',
value: 1473.8181818181818,
},
{
userCountry: 'HK',
value: 1654.6666666666667,
},
],
unit: 'ms',
};
return (
<ExCard {...props}>
<SpeedIndexByLocation data={data} />
</ExCard>
);
}
export default SpeedIndexByLocationExample;

View file

@ -64,6 +64,7 @@ function DashboardView(props: Props) {
}; };
useEffect(() => { useEffect(() => {
dashboardStore.resetPeriod();
if (queryParams.has('modal')) { if (queryParams.has('modal')) {
onAddWidgets(); onAddWidgets();
trimQuery(); trimQuery();

View file

@ -117,8 +117,6 @@ const ListView: React.FC<Props> = ({
if (disableSelection) { if (disableSelection) {
const path = withSiteId(`/metrics/${metric.metricId}`, siteId); const path = withSiteId(`/metrics/${metric.metricId}`, siteId);
history.push(path); history.push(path);
} else {
toggleSelection?.(metric.metricId);
} }
}; };

View file

@ -181,9 +181,10 @@ function WidgetChart(props: Props) {
} }
prevMetricRef.current = _metric; prevMetricRef.current = _metric;
const timestmaps = drillDownPeriod.toTimestamps(); const timestmaps = drillDownPeriod.toTimestamps();
const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
const payload = isSaved const payload = isSaved
? { ...metricParams } ? { ...metricParams, density }
: { ...params, ...timestmaps, ..._metric.toJson() }; : { ...params, ...timestmaps, ..._metric.toJson(), density };
debounceRequest( debounceRequest(
_metric, _metric,
payload, payload,
@ -200,6 +201,7 @@ function WidgetChart(props: Props) {
const payload = { const payload = {
...params, ...params,
..._metric.toJson(), ..._metric.toJson(),
viewType: 'lineChart',
}; };
fetchMetricChartData( fetchMetricChartData(
_metric, _metric,
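The WidgetChart change above derives the chart density from the card itself while previewing and from the dashboard's selected density otherwise, then sends it with either payload shape. A condensed sketch of that selection (buildChartPayload is an illustrative helper, not part of the component):

// Condensed from WidgetChart above: pick the density (points per chart)
// depending on whether the card is previewed or rendered on a dashboard,
// and forward it with the rest of the query payload.
function buildChartPayload(opts: {
  isPreview: boolean;
  metricDensity: number;
  dashboardDensity: number;
  isSaved: boolean;
  metricParams: Record<string, any>;
  params: Record<string, any>;
  timestamps: { startTimestamp: number; endTimestamp: number };
  metricJson: Record<string, any>;
}) {
  const density = opts.isPreview ? opts.metricDensity : opts.dashboardDensity;
  return opts.isSaved
    ? { ...opts.metricParams, density }
    : { ...opts.params, ...opts.timestamps, ...opts.metricJson, density };
}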

View file

@ -55,7 +55,7 @@ function RangeGranularity({
} }
const PAST_24_HR_MS = 24 * 60 * 60 * 1000; const PAST_24_HR_MS = 24 * 60 * 60 * 1000;
function calculateGranularities(periodDurationMs: number) { export function calculateGranularities(periodDurationMs: number) {
const granularities = [ const granularities = [
{ label: 'Hourly', durationMs: 60 * 60 * 1000 }, { label: 'Hourly', durationMs: 60 * 60 * 1000 },
{ label: 'Daily', durationMs: 24 * 60 * 60 * 1000 }, { label: 'Daily', durationMs: 24 * 60 * 60 * 1000 },
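With calculateGranularities exported, other components can derive the Hourly/Daily (and coarser) options for a given period length instead of duplicating the table. A usage sketch (the relative import path is an assumption):

// calculateGranularities is the export added in this hunk; given a period
// duration in milliseconds it returns the granularity options suited to it.
import { calculateGranularities } from './RangeGranularity';

const sevenDaysMs = 7 * 24 * 60 * 60 * 1000;
const granularityOptions = calculateGranularities(sevenDaysMs);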

View file

@ -66,23 +66,8 @@ export default observer(WidgetFormNew);
const FilterSection = observer( const FilterSection = observer(
({ layout, metric, excludeFilterKeys, excludeCategory }: any) => { ({ layout, metric, excludeFilterKeys, excludeCategory }: any) => {
const isTable = metric.metricType === TABLE;
const isHeatMap = metric.metricType === HEATMAP;
const isFunnel = metric.metricType === FUNNEL;
const isInsights = metric.metricType === INSIGHTS;
const isPathAnalysis = metric.metricType === USER_PATH;
const isRetention = metric.metricType === RETENTION;
const canAddSeries = metric.series.length < 3;
const isSingleSeries =
isTable ||
isFunnel ||
isHeatMap ||
isInsights ||
isRetention ||
isPathAnalysis;
const { t } = useTranslation(); const { t } = useTranslation();
const allOpen = isSingleSeries || layout.startsWith('flex-row'); const allOpen = layout.startsWith('flex-row');
const defaultClosed = React.useRef(!allOpen && metric.exists()); const defaultClosed = React.useRef(!allOpen && metric.exists());
const [seriesCollapseState, setSeriesCollapseState] = React.useState< const [seriesCollapseState, setSeriesCollapseState] = React.useState<
Record<number, boolean> Record<number, boolean>
@ -99,6 +84,21 @@ const FilterSection = observer(
}); });
setSeriesCollapseState(defaultSeriesCollapseState); setSeriesCollapseState(defaultSeriesCollapseState);
}, [metric.series]); }, [metric.series]);
const isTable = metric.metricType === TABLE;
const isHeatMap = metric.metricType === HEATMAP;
const isFunnel = metric.metricType === FUNNEL;
const isInsights = metric.metricType === INSIGHTS;
const isPathAnalysis = metric.metricType === USER_PATH;
const isRetention = metric.metricType === RETENTION;
const canAddSeries = metric.series.length < 3;
const isSingleSeries =
isTable ||
isFunnel ||
isHeatMap ||
isInsights ||
isRetention ||
isPathAnalysis;
const collapseAll = () => { const collapseAll = () => {
setSeriesCollapseState((seriesCollapseState) => { setSeriesCollapseState((seriesCollapseState) => {

View file

@ -18,6 +18,7 @@ import SessionsImpactedBySlowRequests from 'App/components/Dashboard/Widgets/Pre
import SessionsPerBrowser from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser'; import SessionsPerBrowser from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import { FilterKey } from 'Types/filter/filterType'; import { FilterKey } from 'Types/filter/filterType';
import CallWithErrors from '../../Widgets/PredefinedWidgets/CallWithErrors'; import CallWithErrors from '../../Widgets/PredefinedWidgets/CallWithErrors';
import SpeedIndexByLocation from '../../Widgets/PredefinedWidgets/SpeedIndexByLocation';
import ResponseTimeDistribution from '../../Widgets/PredefinedWidgets/ResponseTimeDistribution'; import ResponseTimeDistribution from '../../Widgets/PredefinedWidgets/ResponseTimeDistribution';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
@ -48,6 +49,8 @@ function WidgetPredefinedChart(props: Props) {
return <CallsErrors5xx data={data} metric={metric} />; return <CallsErrors5xx data={data} metric={metric} />;
case FilterKey.CALLS_ERRORS: case FilterKey.CALLS_ERRORS:
return <CallWithErrors isTemplate={isTemplate} data={data} />; return <CallWithErrors isTemplate={isTemplate} data={data} />;
case FilterKey.SPEED_LOCATION:
return <SpeedIndexByLocation data={data} />;
default: default:
return ( return (
<div className="h-40 color-red">{t('Widget not supported')}</div> <div className="h-40 color-red">{t('Widget not supported')}</div>

View file

@ -1,376 +1,395 @@
import React, { useEffect, useState } from 'react'; import React, {useEffect, useState} from 'react';
import { NoContent, Loader, Pagination } from 'UI'; import {NoContent, Loader, Pagination} from 'UI';
import { Button, Tag, Tooltip, Dropdown, message } from 'antd'; import {Button, Tag, Tooltip, Dropdown, message} from 'antd';
import { UndoOutlined, DownOutlined } from '@ant-design/icons'; import {UndoOutlined, DownOutlined} from '@ant-design/icons';
import cn from 'classnames'; import cn from 'classnames';
import { useStore } from 'App/mstore'; import {useStore} from 'App/mstore';
import SessionItem from 'Shared/SessionItem'; import SessionItem from 'Shared/SessionItem';
import { observer } from 'mobx-react-lite'; import {observer} from 'mobx-react-lite';
import { DateTime } from 'luxon'; import {DateTime} from 'luxon';
import { debounce, numberWithCommas } from 'App/utils'; import {debounce, numberWithCommas} from 'App/utils';
import useIsMounted from 'App/hooks/useIsMounted'; import useIsMounted from 'App/hooks/useIsMounted';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG';
import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card'; import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card';
import { useTranslation } from 'react-i18next'; import {useTranslation} from 'react-i18next';
interface Props { interface Props {
className?: string; className?: string;
} }
function WidgetSessions(props: Props) { function WidgetSessions(props: Props) {
const { t } = useTranslation(); const {t} = useTranslation();
const listRef = React.useRef<HTMLDivElement>(null); const listRef = React.useRef<HTMLDivElement>(null);
const { className = '' } = props; const {className = ''} = props;
const [activeSeries, setActiveSeries] = useState('all'); const [activeSeries, setActiveSeries] = useState('all');
const [data, setData] = useState<any>([]); const [data, setData] = useState<any>([]);
const isMounted = useIsMounted(); const isMounted = useIsMounted();
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
// all filtering done through series now // all filtering done through series now
const filteredSessions = getListSessionsBySeries(data, 'all'); const filteredSessions = getListSessionsBySeries(data, 'all');
const { dashboardStore, metricStore, sessionStore, customFieldStore } = const {dashboardStore, metricStore, sessionStore, customFieldStore} =
useStore(); useStore();
const focusedSeries = metricStore.focusedSeriesName; const focusedSeries = metricStore.focusedSeriesName;
const filter = dashboardStore.drillDownFilter; const filter = dashboardStore.drillDownFilter;
const widget = metricStore.instance; const widget = metricStore.instance;
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat( const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
'LLL dd, yyyy HH:mm', 'LLL dd, yyyy HH:mm',
); );
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat( const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
'LLL dd, yyyy HH:mm', 'LLL dd, yyyy HH:mm',
); );
const [seriesOptions, setSeriesOptions] = useState([ const [seriesOptions, setSeriesOptions] = useState([
{ label: t('All'), value: 'all' }, {label: t('All'), value: 'all'},
]); ]);
const hasFilters = const hasFilters =
filter.filters.length > 0 || filter.filters.length > 0 ||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start || filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end; filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
const filterText = filter.filters.length > 0 ? filter.filters[0].value : ''; const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
const metaList = customFieldStore.list.map((i: any) => i.key); const metaList = customFieldStore.list.map((i: any) => i.key);
const seriesDropdownItems = seriesOptions.map((option) => ({ const seriesDropdownItems = seriesOptions.map((option) => ({
key: option.value, key: option.value,
label: ( label: (
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div> <div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
), ),
}));
useEffect(() => {
if (!widget.series) return;
const seriesOptions = widget.series.map((item: any) => ({
label: item.name,
value: item.seriesId ?? item.name,
})); }));
setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]);
}, [widget.series.length]);
const fetchSessions = (metricId: any, filter: any) => { useEffect(() => {
if (!isMounted()) return; if (!widget.series) return;
setLoading(true); const seriesOptions = widget.series.map((item: any) => ({
delete filter.eventsOrderSupport; label: item.name,
if (widget.metricType === FUNNEL) { value: item.seriesId ?? item.name,
if (filter.series[0].filter.filters.length === 0) { }));
setLoading(false); setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]);
return setData([]); }, [widget.series.length]);
}
}
widget const fetchSessions = (metricId: any, filter: any) => {
.fetchSessions(metricId, filter) if (!isMounted()) return;
.then((res: any) => {
setData(res); if (widget.metricType === FUNNEL) {
if (metricStore.drillDown) { if (filter.series[0].filter.filters.length === 0) {
setTimeout(() => { setLoading(false);
message.info(t('Sessions Refreshed!')); return setData([]);
listRef.current?.scrollIntoView({ behavior: 'smooth' }); }
metricStore.setDrillDown(false);
}, 0);
} }
})
.finally(() => {
setLoading(false);
});
};
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
sessionStore.getSessions(customFilters).then((data) => {
setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]);
});
};
const debounceRequest: any = React.useCallback(
debounce(fetchSessions, 1000),
[],
);
const debounceClickMapSearch = React.useCallback(
debounce(fetchClickmapSessions, 1000),
[],
);
const depsString = JSON.stringify(widget.series);
const loadData = () => { setLoading(true);
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) { const filterCopy = {...filter};
const clickFilter = { delete filterCopy.eventsOrderSupport;
value: [metricStore.clickMapSearch],
type: 'CLICK', try {
operator: 'onSelector', // Handle filters properly with null checks
isEvent: true, if (filterCopy.filters && filterCopy.filters.length > 0) {
// @ts-ignore // Ensure the nested path exists before pushing
filters: [], if (filterCopy.series?.[0]?.filter) {
}; if (!filterCopy.series[0].filter.filters) {
const timeRange = { filterCopy.series[0].filter.filters = [];
rangeValue: dashboardStore.drillDownPeriod.rangeValue, }
startDate: dashboardStore.drillDownPeriod.start, filterCopy.series[0].filter.filters.push(...filterCopy.filters);
endDate: dashboardStore.drillDownPeriod.end, }
}; filterCopy.filters = [];
const customFilter = { }
...filter, } catch (e) {
...timeRange, // do nothing
filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
} }
} widget
debounceRequest(widget.metricId, { .fetchSessions(metricId, filterCopy)
...filter, .then((res: any) => {
series: seriesJson, setData(res);
page: metricStore.sessionsPage, if (metricStore.drillDown) {
limit: metricStore.sessionsPageSize, setTimeout(() => {
}); message.info(t('Sessions Refreshed!'));
} listRef.current?.scrollIntoView({behavior: 'smooth'});
}; metricStore.setDrillDown(false);
useEffect(() => { }, 0);
metricStore.updateKey('sessionsPage', 1); }
loadData(); })
}, [ .finally(() => {
filter.startTimestamp, setLoading(false);
filter.endTimestamp, });
filter.filters, };
depsString, const fetchClickmapSessions = (customFilters: Record<string, any>) => {
metricStore.clickMapSearch, sessionStore.getSessions(customFilters).then((data) => {
focusedSeries, setData([{...data, seriesId: 1, seriesName: 'Clicks'}]);
widget.startPoint, });
widget.data.nodes, };
metricStore.disabledSeries.length, const debounceRequest: any = React.useCallback(
]); debounce(fetchSessions, 1000),
useEffect(loadData, [metricStore.sessionsPage]); [],
useEffect(() => { );
if (activeSeries === 'all') { const debounceClickMapSearch = React.useCallback(
metricStore.setFocusedSeriesName(null); debounce(fetchClickmapSessions, 1000),
} else { [],
metricStore.setFocusedSeriesName( );
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
const clearFilters = () => { const depsString = JSON.stringify(widget.series);
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
return ( const loadData = () => {
<div if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
className={cn( const clickFilter = {
className, value: [metricStore.clickMapSearch],
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3', type: 'CLICK',
)} operator: 'onSelector',
> isEvent: true,
<div className="flex items-center justify-between"> // @ts-ignore
<div> filters: [],
<div className="flex items-baseline gap-2"> };
<h2 className="text-xl"> const timeRange = {
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')} rangeValue: dashboardStore.drillDownPeriod.rangeValue,
</h2> startDate: dashboardStore.drillDownPeriod.start,
<div className="ml-2 color-gray-medium"> endDate: dashboardStore.drillDownPeriod.end,
{metricStore.clickMapLabel };
? `on "${metricStore.clickMapLabel}" ` const customFilter = {
: null} ...filter,
{t('between')}{' '} ...timeRange,
<span className="font-medium color-gray-darkest"> filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes?.length
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
}
}
debounceRequest(widget.metricId, {
...filter,
series: seriesJson,
page: metricStore.sessionsPage,
limit: metricStore.sessionsPageSize,
});
}
};
useEffect(() => {
metricStore.updateKey('sessionsPage', 1);
loadData();
}, [
filter.startTimestamp,
filter.endTimestamp,
filter.filters,
depsString,
metricStore.clickMapSearch,
focusedSeries,
widget.startPoint,
widget.data.nodes,
metricStore.disabledSeries.length,
]);
useEffect(loadData, [metricStore.sessionsPage]);
useEffect(() => {
if (activeSeries === 'all') {
metricStore.setFocusedSeriesName(null);
} else {
metricStore.setFocusedSeriesName(
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
const clearFilters = () => {
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
return (
<div
className={cn(
className,
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
)}
>
<div className="flex items-center justify-between">
<div>
<div className="flex items-baseline gap-2">
<h2 className="text-xl">
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
</h2>
<div className="ml-2 color-gray-medium">
{metricStore.clickMapLabel
? `on "${metricStore.clickMapLabel}" `
: null}
{t('between')}{' '}
<span className="font-medium color-gray-darkest">
{startTime} {startTime}
</span>{' '} </span>{' '}
{t('and')}{' '} {t('and')}{' '}
<span className="font-medium color-gray-darkest"> <span className="font-medium color-gray-darkest">
{endTime} {endTime}
</span>{' '} </span>{' '}
</div> </div>
{hasFilters && ( {hasFilters && (
<Tooltip title={t('Clear Drilldown')} placement="top"> <Tooltip title={t('Clear Drilldown')} placement="top">
<Button type="text" size="small" onClick={clearFilters}> <Button type="text" size="small" onClick={clearFilters}>
<UndoOutlined /> <UndoOutlined/>
</Button> </Button>
</Tooltip> </Tooltip>
)} )}
</div> </div>
{hasFilters && widget.metricType === 'table' && ( {hasFilters && widget.metricType === 'table' && (
<div className="py-2"> <div className="py-2">
<Tag <Tag
closable closable
onClose={clearFilters} onClose={clearFilters}
className="truncate max-w-44 rounded-lg" className="truncate max-w-44 rounded-lg"
> >
{filterText} {filterText}
</Tag> </Tag>
</div> </div>
)} )}
</div> </div>
<div className="flex items-center gap-4"> <div className="flex items-center gap-4">
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && ( {widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
<div className="flex items-center ml-6"> <div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium"> <span className="mr-2 color-gray-medium">
{t('Filter by Series')} {t('Filter by Series')}
</span> </span>
<Dropdown <Dropdown
menu={{ menu={{
items: seriesDropdownItems, items: seriesDropdownItems,
selectable: true, selectable: true,
selectedKeys: [activeSeries], selectedKeys: [activeSeries],
}} }}
trigger={['click']} trigger={['click']}
> >
<Button type="text" size="small"> <Button type="text" size="small">
{seriesOptions.find((option) => option.value === activeSeries) {seriesOptions.find((option) => option.value === activeSeries)
?.label || t('Select Series')} ?.label || t('Select Series')}
<DownOutlined /> <DownOutlined/>
</Button> </Button>
</Dropdown> </Dropdown>
</div> </div>
)} )}
</div>
</div>
<div className="mt-3">
<Loader loading={loading}>
<NoContent
title={
<div className="flex items-center justify-center flex-col">
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60} />
<div className="mt-4" />
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
</div> </div>
</div> </div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b" />
</React.Fragment>
))}
<div <div className="mt-3">
className="flex items-center justify-between p-5" <Loader loading={loading}>
ref={listRef} <NoContent
> title={
<div> <div className="flex items-center justify-center flex-col">
{t('Showing')}{' '} <AnimatedSVG name={ICONS.NO_SESSIONS} size={60}/>
<span className="font-medium"> <div className="mt-4"/>
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
</div>
</div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b"/>
</React.Fragment>
))}
<div
className="flex items-center justify-between p-5"
ref={listRef}
>
<div>
{t('Showing')}{' '}
<span className="font-medium">
{(metricStore.sessionsPage - 1) * {(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize + metricStore.sessionsPageSize +
1} 1}
</span>{' '} </span>{' '}
{t('to')}{' '} {t('to')}{' '}
<span className="font-medium"> <span className="font-medium">
{(metricStore.sessionsPage - 1) * {(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize + metricStore.sessionsPageSize +
filteredSessions.sessions.length} filteredSessions.sessions.length}
</span>{' '} </span>{' '}
{t('of')}{' '} {t('of')}{' '}
<span className="font-medium"> <span className="font-medium">
{numberWithCommas(filteredSessions.total)} {numberWithCommas(filteredSessions.total)}
</span>{' '} </span>{' '}
{t('sessions.')} {t('sessions.')}
</div> </div>
<Pagination <Pagination
page={metricStore.sessionsPage} page={metricStore.sessionsPage}
total={filteredSessions.total} total={filteredSessions.total}
onPageChange={(page: any) => onPageChange={(page: any) =>
metricStore.updateKey('sessionsPage', page) metricStore.updateKey('sessionsPage', page)
} }
limit={metricStore.sessionsPageSize} limit={metricStore.sessionsPageSize}
debounceRequest={500} debounceRequest={500}
/> />
</div>
</NoContent>
</Loader>
</div> </div>
</NoContent> </div>
</Loader> );
</div>
</div>
);
} }
const getListSessionsBySeries = (data: any, seriesId: any) => { const getListSessionsBySeries = (data: any, seriesId: any) => {
const arr = data.reduce( const arr = data.reduce(
(arr: any, element: any) => { (arr: any, element: any) => {
if (seriesId === 'all') { if (seriesId === 'all') {
const sessionIds = arr.sessions.map((i: any) => i.sessionId); const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter( const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId), (i: any) => !sessionIds.includes(i.sessionId),
); );
arr.sessions.push(...sessions); arr.sessions.push(...sessions);
} else if (element.seriesId === seriesId) { } else if (element.seriesId === seriesId) {
const sessionIds = arr.sessions.map((i: any) => i.sessionId); const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter( const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId), (i: any) => !sessionIds.includes(i.sessionId),
); );
const duplicates = element.sessions.length - sessions.length; const duplicates = element.sessions.length - sessions.length;
arr.sessions.push(...sessions); arr.sessions.push(...sessions);
arr.total = element.total - duplicates; arr.total = element.total - duplicates;
} }
return arr; return arr;
}, },
{ sessions: [] }, {sessions: []},
); );
arr.total = arr.total =
seriesId === 'all' seriesId === 'all'
? Math.max(...data.map((i: any) => i.total)) ? Math.max(...data.map((i: any) => i.total))
: data.find((i: any) => i.seriesId === seriesId).total; : data.find((i: any) => i.seriesId === seriesId).total;
return arr; return arr;
}; };
export default observer(WidgetSessions); export default observer(WidgetSessions);
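getListSessionsBySeries, at the bottom of WidgetSessions, unions the per-series session lists while skipping sessions already collected and keeps the largest per-series total as the overall count. A small self-contained example of the 'all' case, with plain objects standing in for the store types:

// Simplified, standalone version of getListSessionsBySeries for the 'all'
// case: union the sessions of every series by sessionId and report the
// largest per-series total as the overall total.
type SeriesSessions = {
  seriesId: string | number;
  total: number;
  sessions: { sessionId: string }[];
};

function mergeAllSeries(data: SeriesSessions[]) {
  const seen = new Set<string>();
  const sessions = data.flatMap((series) =>
    series.sessions.filter((s) => {
      if (seen.has(s.sessionId)) return false;
      seen.add(s.sessionId);
      return true;
    }),
  );
  const total = Math.max(0, ...data.map((s) => s.total));
  return { sessions, total };
}

// Example: session "b" appears in both series but is listed only once.
const merged = mergeAllSeries([
  { seriesId: 1, total: 2, sessions: [{ sessionId: 'a' }, { sessionId: 'b' }] },
  { seriesId: 2, total: 2, sessions: [{ sessionId: 'b' }, { sessionId: 'c' }] },
]);
// merged.sessions -> a, b, c ; merged.total -> 2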

View file

@ -92,6 +92,9 @@ function WidgetView({
filter: { filters: selectedCard.filters }, filter: { filters: selectedCard.filters },
}), }),
]; ];
} else if (selectedCard.cardType === TABLE) {
cardData.series = [new FilterSeries()];
cardData.series[0].filter.eventsOrder = 'and';
} }
if (selectedCard.cardType === FUNNEL) { if (selectedCard.cardType === FUNNEL) {
cardData.series = [new FilterSeries()]; cardData.series = [new FilterSeries()];

Some files were not shown because too many files have changed in this diff.