Compare commits

138 commits

Author SHA1 Message Date
nick-delirium
90510aa33b ui: fix double metric selection in list 2025-06-06 16:19:54 +02:00
GitHub Action
96a70f5d41 Increment frontend chart version to v1.22.42 2025-06-04 11:41:56 +02:00
rjshrjndrn
d4a13edcf0 fix(actions): frontend image with proper tag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-06-04 11:33:19 +02:00
GitHub Action
51fad91a22 Increment frontend chart version to v1.22.41 2025-06-04 10:48:50 +02:00
nick-delirium
36abcda1e1 ui: fix audioplayer start point 2025-06-04 10:39:08 +02:00
Mehdi Osman
dd5f464f73
Increment frontend chart version to v1.22.40 (#3479)
Co-authored-by: GitHub Action <action@github.com>
2025-06-03 16:22:12 +02:00
Delirium
f9ada41272
ui: recreate period on db visit (#3478) 2025-06-03 16:05:52 +02:00
rjshrjndrn
9e24a3583e feat(nginx): add integrations endpoint with CORS support
Add new /integrations/ location block that proxies requests to
integrations-openreplay:8080 service. Includes proper CORS headers
for cross-origin requests and WebSocket upgrade support.

- Rewrite /integrations/ path to root
- Configure proxy headers for forwarding
- Set connection timeouts for stability
- Add CORS headers for API access

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-06-02 10:55:50 +02:00
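A quick way to exercise the behaviour this commit describes is a pair of curl calls against the gateway, assuming nginx is reachable on localhost and the block is mounted at /integrations/ as stated; the request path, origin and host below are illustrative, not taken from the actual config.

# Preflight: the CORS headers added by the new location block should appear on the OPTIONS response.
curl -i -X OPTIONS "http://localhost/integrations/status" \
  -H "Origin: https://app.example.com" \
  -H "Access-Control-Request-Method: GET"

# Plain request: /integrations/<path> is rewritten to /<path> and proxied to integrations-openreplay:8080.
curl -i "http://localhost/integrations/status" -H "Origin: https://app.example.com"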
Taha Yassine Kraiem
0a3129d3cd fix(chalice): fixed JIRA integration 2025-05-30 15:25:41 +02:00
Mehdi Osman
99d61db9d9
Increment frontend chart version to v1.22.39 (#3460)
Co-authored-by: GitHub Action <action@github.com>
2025-05-30 15:07:29 +02:00
Delirium
133958622e
ui: fix alert create button (#3459) 2025-05-30 14:56:21 +02:00
GitHub Action
fb021f606f Increment frontend chart version to v1.22.38 2025-05-29 12:21:04 +02:00
rjshrjndrn
a2905fa8ed fix: move cd - command after git operations in patch workflow
Move the directory restoration command after the git operations to
ensure all git commands execute in the correct working directory
before returning to the previous directory.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 12:16:28 +02:00
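The fix above is purely about ordering: every git command has to run while the script is still inside the repository, and cd - must come last. A minimal sketch of the corrected pattern with a placeholder path (the real workflow operates on the repository checked out by the job):

#!/usr/bin/env bash
set -euo pipefail

cd /tmp/example-repo         # placeholder; the workflow uses the job's checkout
git add scripts/helmcharts   # git operations run inside the repository...
git commit -m "Increment chart version"
git push origin HEAD
cd -                         # ...and only afterwards do we return to the previous directory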
rjshrjndrn
beec2283fd refactor(ci): restructure patch-build workflow script
- Extract inline bash script into structured functions
- Add proper error handling with set -euo pipefail
- Improve variable scoping with readonly and local declarations
- Add descriptive function names and comments
- Fix shell quoting and parameter expansion
- Consolidate build logic into reusable functions
- Add proper cleanup of temporary files
- Improve readability and maintainability of the CI script

The refactored script maintains the same functionality while being
more robust and easier to understand.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 12:16:28 +02:00
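Taken together, the bullet points above describe a conventional bash skeleton rather than anything workflow-specific. A generic sketch of that shape (this is only the pattern; the workflow's actual script appears in the patch-build diff further below):

#!/usr/bin/env bash
set -euo pipefail                  # fail fast on errors, unset variables and broken pipes

readonly WORK_DIR=$(pwd)           # constants are marked readonly
TMP_FILE=$(mktemp)

cleanup() { rm -f "$TMP_FILE"; }   # temporary files are always removed
trap cleanup EXIT

build_service() {                  # build logic lives in small, named functions
  local service=$1                 # variables are scoped with local
  echo "building ${service} from ${WORK_DIR}" >"$TMP_FILE"
}

main() {
  for svc in "$@"; do
    build_service "$svc"
  done
}

main "$@"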
GitHub Action
6c8b55019e Increment frontend chart version 2025-05-29 10:29:46 +02:00
rjshrjndrn
e3e3e11227 fix(action): proper registry
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-29 10:18:55 +02:00
Shekar Siri
c6f7de04cc Revert "fix(ui): new card data state is not updating"
This reverts commit 2921c17cbf.
2025-05-28 22:16:00 +02:00
Shekar Siri
2921c17cbf fix(ui): new card data state is not updating 2025-05-28 19:49:01 +02:00
Mehdi Osman
7eb3f5c4c8
Increment frontend chart version (#3436)
Co-authored-by: GitHub Action <action@github.com>
2025-05-26 16:10:35 +02:00
Rajesh Rajendran
5a9a8e588a
chore(actions): rebase only if not main (#3435)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 16:04:50 +02:00
Rajesh Rajendran
4b14258266
fix(action): clone repo (#3433)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 15:50:13 +02:00
Rajesh Rajendran
744d2d4311
actions fix or 2070 (#3432)
* chore(build): Better error handling

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(build): remove fetch depth, as it might cause issue in rebase

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(build): proper platform

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-26 15:45:48 +02:00
Taha Yassine Kraiem
64242a5dc0 refactor(DB): changed supported platforms in CH 2025-05-26 11:51:49 +02:00
Rajesh Rajendran
cae3002697
feat(ci): Support building from branch for old patch (#3419)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-20 15:19:04 +02:00
GitHub Action
3d3c62196b Increment frontend chart version 2025-05-20 11:44:16 +02:00
nick-delirium
e810958a5d ui: fix ant imports 2025-05-20 11:26:20 +02:00
nick-delirium
39fa9787d1 ui: prevent network row modal from changing replayer time 2025-05-20 11:21:50 +02:00
nick-delirium
c9c1ad4dde ui: comments etc 2025-05-20 11:21:50 +02:00
nick-delirium
d9868928be ui: improve network panel row mapping 2025-05-20 11:21:50 +02:00
GitHub Action
a460d8c9a2 Increment frontend chart version 2025-05-15 15:18:19 +02:00
nick-delirium
930417aab4 ui: fix session search on url change 2025-05-15 15:12:30 +02:00
GitHub Action
07bc184f4d Increment chalice chart version 2025-05-14 18:59:43 +02:00
Rajesh Rajendran
71b7cca569
Patch/api v1.22.0 (#3401)
* fix(chalice): fixed duplicate autocomplete values

* ci(actions): possible fix for pull --rebase

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
Co-authored-by: Taha Yassine Kraiem <tahayk2@gmail.com>
2025-05-14 18:42:25 +02:00
Mehdi Osman
355d27eaa0
Increment frontend chart version (#3397)
Co-authored-by: GitHub Action <action@github.com>
2025-05-13 13:38:15 +02:00
Mehdi Osman
66b485cccf
Increment db chart version (#3396)
Co-authored-by: GitHub Action <action@github.com>
2025-05-13 10:34:28 +02:00
Alexander
de33a42151
feat(db): custom event's ts (#3395) 2025-05-12 17:52:24 +02:00
Rajesh Rajendran
f12bdebf82
ci(actions): fix push denied (#3392) (#3393) (#3394)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 17:19:41 +02:00
Rajesh Rajendran
bbfa20c693
ci(actions): fix push denied (#3392) (#3393)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:58:19 +02:00
Rajesh Rajendran
f264ba043d
ci(actions): fix push denied (#3392)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:55:23 +02:00
Rajesh Rajendran
a05dce8125
main (#3391)
* ci(actions): Update pr description

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* ci(actions): run only on pull request merge

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 16:50:20 +02:00
Mehdi Osman
3a1635d81f
Increment frontend chart version (#3389)
Co-authored-by: GitHub Action <action@github.com>
2025-05-12 16:12:43 +02:00
Delirium
ccb332c636
ui: change <slot> check (#3388) 2025-05-12 16:02:26 +02:00
Rajesh Rajendran
80ffa15959
ci(actions): Auto update tag for patch build (#3387)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 15:54:10 +02:00
Rajesh Rajendran
b2e961d621
ci(actions): Auto update tag for patch build (#3386)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-05-12 15:49:19 +02:00
Mehdi Osman
b4d0598f23
Increment frontend chart version (#3385)
Co-authored-by: GitHub Action <action@github.com>
2025-05-12 15:46:29 +02:00
Delirium
e77f083f10
ui: fixup toggler closing (#3384) 2025-05-12 15:40:30 +02:00
Delirium
58da1d3f64
fix litjs support, fix autocomplete modal options reset, fix dashboard chart density (#3382)
* Litjs fixes2 (#3381)

* ui: fixes for litjs capture

* ui: introduce vmode for lwc light dom

* ui: fixup the mode toggle and remover

* ui: fix filter options reset, fix dashboard chart density
2025-05-12 15:27:44 +02:00
GitHub Action
447fc26a2a Increment frontend chart version 2025-05-12 10:46:33 +02:00
nick-delirium
9bdf6e4f92 ui: fix heatmaps crash 2025-05-12 10:37:48 +02:00
GitHub Action
01f403e12d Increment chalice chart version 2025-05-07 12:28:44 +02:00
Taha Yassine Kraiem
39eb943b86 fix(chalice): fixed get error's details 2025-05-07 12:15:33 +02:00
GitHub Action
366b0d38b0 Increment frontend chart version 2025-05-06 16:28:28 +02:00
nick-delirium
f4d5b3c06e ui: fix max meta length, add horizontal layout for player 2025-05-06 16:23:47 +02:00
Mehdi Osman
93ae18133e
Increment frontend chart version (#3366)
Co-authored-by: GitHub Action <action@github.com>
2025-05-06 13:16:57 +02:00
Andrey Babushkin
fbe5d78270
Revert update (#3365)
* Revert "Increment chalice chart version"

This reverts commit 5e0e5730ba.

* revert updates

* changed chalice version
2025-05-06 13:08:08 +02:00
Mehdi Osman
b803eed1d4
Increment frontend chart version (#3362)
Co-authored-by: GitHub Action <action@github.com>
2025-05-05 17:49:39 +02:00
Andrey Babushkin
9ed3cb1b7e
Add searched events (#3361)
* add filtered events to search

* removed consoles

* changed styles to tailwind

* changed styles to tailwind

* fixed errors
2025-05-05 17:40:10 +02:00
GitHub Action
5e0e5730ba Increment chalice chart version 2025-05-05 17:04:29 +02:00
Taha Yassine Kraiem
d78b33dcd2 refactor(DB): remove TTL for CH tables 2025-05-05 16:49:37 +02:00
Taha Yassine Kraiem
4b1ca200b4 fix(chalice): fixed empty error_id for table of errors 2025-05-05 16:49:37 +02:00
rjshrjndrn
08d930f9ff fix(docker-compose): proper volume path #3279
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-28 17:28:40 +02:00
Mehdi Osman
da37809bc8
Increment frontend chart version (#3345)
Co-authored-by: GitHub Action <action@github.com>
2025-04-28 11:38:04 +02:00
Andrey Babushkin
d922fc7ad5
Patch frontend inline css (#3344)
* add inlineCss enum

* updated changelog
2025-04-28 11:29:53 +02:00
GitHub Action
796360fdd2 Increment frontend chart version 2025-04-28 11:01:55 +02:00
nick-delirium
13dbb60d8b ui: fix velement applychanges 2025-04-28 10:40:11 +02:00
Андрей Бабушкин
9e20a49128 add slot tag to custom elements 2025-04-28 10:34:43 +02:00
nick-delirium
91f8cc1399 ui: move debouncecall 2025-04-28 10:34:43 +02:00
Andrey Babushkin
f8ba3f6d89 Css batching (#3326)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* ui: rereoute custom html component fragments

* removed sorting

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Delirium
85e30b3692 tracker css batching/inlining (#3334)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* removed sorting

* upgrade css inliner

* ui: better logging for ocunter

* tracker: force-fetch mode for cssInliner

* tracker: fix ts warns

* tracker: use debug opts

* tracker: 16.2.0 changelogs, inliner opts

* tracker: remove debug options

---------

Co-authored-by: Андрей Бабушкин <andreybabushkin2000@gmail.com>
2025-04-28 10:34:43 +02:00
nick-delirium
0360e3726e ui: fixup autoplay on inactive tabs 2025-04-28 10:34:43 +02:00
nick-delirium
77bbb5af36 tracker: update css inject 2025-04-28 10:34:43 +02:00
Andrey Babushkin
ab0d4cfb62 Css inliner tuning (#3337)
* tracker: don't send double sheets

* tracker: don't send double sheets

* tracker: slot checker

* add slot tag to custom elements

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Andrey Babushkin
3fd506a812 Css batching (#3326)
* tracker: initial css inlining functionality

* tracker: add tests, adjust sheet id, stagger rule sending

* ui: rereoute custom html component fragments

* removed sorting

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>
2025-04-28 10:34:43 +02:00
Shekar Siri
e8432e2dec change(ui): force the table cards events order to use AND instead of the default THEN 2025-04-24 10:09:19 +02:00
GitHub Action
5c76a8524c Increment frontend chart version 2025-04-23 18:41:46 +02:00
rjshrjndrn
3ba40a4811 feat(cli): Add support for image versions
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
rjshrjndrn
f9a3f24590 fix(docker-compose): clickhouse migration
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
rjshrjndrn
85d6d0abac fix(docker-compose): remove shell interpolation
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
Rajesh Rajendran
b3594136ce or 1940 upstream docker release with the existing installation (#3316)
* chore(docker): Adding dynamic env generator
* ci(make): Create deployment yamls
* ci(make): Generating docker envs
* change env name structure
* proper env names
* chore(docker): clickhouse
* chore(docker-compose): generate env file format
* chore(docker-compose): Adding docker-compose
* chore(docker-compose): format make
* chore(docker-compose): Update version
* chore(docker-compose): adding new secrets
* ci(make): default target
* ci(Makefile): Update common protocol
* chore(docker-compose): refactor folder structure
* ci(make): rename to docker-envs
* feat(docker): add clickhouse volume definition
Add clickhouse persistent volume to the docker-compose configuration
to ensure data is preserved between container restarts.
* refactor: move env files to docker-envs directory
Updates all environment file references in docker-compose.yaml to use a
consistent directory structure, placing them under the docker-envs/
directory for better organization.
* fix(docker): rename imagestorage to images
 The `imagestorage` service and related environment file
 have been renamed to `images` for clarity and consistency.
 This change reflects the service's purpose of handling
 images.
* feat(docker): introduce docker-compose template
 A new docker-compose template
 to generate docker-compose files from a list of services.
 The template uses helm syntax.
* fix: Properly set FILES variable in Makefile
 The FILES variable was not being set correctly in the
 Makefile due to subshell issues. This commit fixes the
 variable assignment and ensures that the variable is
 accessible in subsequent commands.
* feat: Refactor docker-compose template for local development
 This commit introduces a complete overhaul of the
 docker-compose template, switching from a helm-based
 template to a native docker-compose.yml file. This
 change simplifies local development and makes it easier
 to manage the OpenReplay stack.
 The new template includes services for:
 - PostgreSQL
 - ClickHouse
 - Redis
 - MinIO
 - Nginx
 - Caddy
 It also includes migration jobs for setting up the
 database and MinIO.
* fix(docker-compose): Add fallback empty environment
 Add an empty environment to the docker-compose template to prevent
 errors when the env_file is missing. This ensures that the
 container can start even if the environment file is not present.
* feat(docker): Add domainname and aliases to services
 This change adds the `domainname` and `aliases` attributes to each
 service in the docker-compose.yaml file. This is to ensure that
 the services can communicate with each other using their fully
 qualified domain names. Also adds shared volume and empty
 environment variables.
* update version
* chore(docker): don't pull parallel
* chore(docker-compose): proper pull
* chore(docker-compose): Update db service urls
* fix(docker-compose): clickhouse url
* chore(clickhouse): Adding clickhouse db migration
* chore(docker-compose): Adding clickhouse
* fix(tpl): variable injection
* chore(fix): compose tpl variable rendering
* chore(docker-compose): Allow override pg variable
* chore(helm): remove assist-server
* chore(helm): pg integrations
* chore(nginx): removed services
* chore(docker-compose): Mulitple aliases
* chore(docker-compose): Adding more env vars
* feat(install): Dynamically generate passwords
 dynamic password generation by
 identifying `change_me_*` entries in `common.env` and
 replacing them with random passwords. This enhances
 security and simplifies initial setup.
 The changes include:
 - Replacing hardcoded password replacements with a loop
   that iterates through all `change_me_*` entries.
 - Using `grep` to find all `change_me_*` tokens.
 - Generating a random password for each token.
 - Updating the `common.env` file with the generated
   passwords.
* chore(docker-compose): disable clickhouse password
* fix(docker-compose): clickhouse-migration
* compose: chalice env
* chore(docker-compose): overlay vars
* chore(docker): Adding ch port
* chore(docker-compose): disable clickhouse password
* fix(docker-compose): migration name
* feat(docker): skip specific values
* chore(docker-compose): define namespace
---------

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-23 17:52:50 +02:00
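The feat(install) bullet in the entry above reduces to one loop: find every change_me_* placeholder in common.env and substitute a random value for it. A sketch of that idea, with the file path and token pattern taken from the message and the password command chosen here only for illustration:

#!/usr/bin/env bash
set -euo pipefail

env_file="common.env"   # path as described in the commit message

# Each distinct change_me_* token gets its own randomly generated password.
for token in $(grep -o 'change_me_[A-Za-z0-9_]*' "$env_file" | sort -u); do
  password=$(openssl rand -hex 16)
  sed -i "s/${token}/${password}/g" "$env_file"
done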
GitHub Action
8f67edde8d Increment chalice chart version 2025-04-23 12:26:20 +02:00
Taha Yassine Kraiem
74ed29915b fix(chalice): enforce AND operator for table of requests and table of pages 2025-04-23 11:51:38 +02:00
GitHub Action
3ca71ec211 Increment chalice chart version 2025-04-22 19:23:11 +02:00
Taha Yassine Kraiem
0e469fd056 fix(chalice): fixes for table of requests 2025-04-22 19:03:35 +02:00
KRAIEM Taha Yassine
a8cb0e1643 fix(chalice): fixes for table of requests 2025-04-22 19:03:35 +02:00
GitHub Action
e171f0d8d5 Increment frontend chart version 2025-04-22 17:56:00 +02:00
nick-delirium
68ea291444 ui: fix timepicker and timezone interactions 2025-04-22 17:42:56 +02:00
GitHub Action
05cbb831c7 Increment frontend chart version 2025-04-22 10:32:00 +02:00
nick-delirium
5070ded1f4 ui: fix empty sank sessions fetch 2025-04-22 10:27:16 +02:00
GitHub Action
77610a4924 Increment frontend chart version 2025-04-16 17:45:25 +02:00
nick-delirium
7c34e4a0f6 ui: virtualizer for filter options list 2025-04-16 17:36:34 +02:00
GitHub Action
330e21183f Increment frontend chart version 2025-04-15 18:25:49 +02:00
Shekar Siri
30ce37896c feat(widget-sessions): improve session filtering logic
- Refactored session filtering logic to handle nested filters properly.
- Enhanced `fetchSessions` to ensure null checks and avoid errors.
- Updated `loadData` to handle `USER_PATH` and `HEATMAP` metric types.
- Improved UI consistency by adjusting spacing and formatting.
- Replaced redundant code with cleaner, more maintainable patterns.

This change improves the reliability and readability of the session
filtering and loading logic in the WidgetSessions component.
2025-04-15 18:15:03 +02:00
Andrey Babushkin
80a7817e7d
removed sorting by id (#3305) 2025-04-15 13:32:53 +02:00
Jorgen Evens
1b9c568cb1 fix(helm): fix broken volumeMounts indentation 2025-04-14 15:51:41 +02:00
GitHub Action
3759771ae9 Increment frontend chart version 2025-04-14 12:06:09 +02:00
Shekar Siri
f6ae5aba88 feat(SessionsBy): add specific filter for FETCH metric
Added a conditional check to handle the FETCH metric in the SessionsBy
component. When the metric is FETCH, a specific filter with key
FETCH_URL, operator is, and value derived from data.name is applied.
This ensures proper filtering behavior for FETCH-related metrics.
2025-04-14 12:01:51 +02:00
Mehdi Osman
5190dc512a
Increment frontend chart version (#3297)
Co-authored-by: GitHub Action <action@github.com>
2025-04-14 11:54:25 +02:00
Andrey Babushkin
3fcccb51e8
Patch assist (#3296)
* add global method support

* fix errors

* remove wrong updates

* remove wrong updates

* add onDrag as option

* fix wrong updates
2025-04-14 11:33:06 +02:00
GitHub Action
26077d5689 Increment frontend chart version 2025-04-11 14:56:11 +02:00
Shekar Siri
00c57348fd feat(search): enhance filter value handling
- Added `checkFilterValue` function to validate and update filter values
  in `SearchStoreLive`.
- Updated `FilterItem` to handle undefined `value` gracefully by providing
  a default empty array.

These changes improve robustness in filter value processing.
2025-04-11 14:36:25 +02:00
Shekar Siri
1f9bc5520a feat(search): add rounding to next minutes for date ranges
- Introduced `roundToNextMinutes` utility function to round timestamps
  to the next specified minute interval.
- Updated `Search` class to use the rounding function for non-custom
  date ranges.
- Modified `getRange` in `period.js` to align LAST_24_HOURS with
  15-minute intervals.
- Added `roundToNextMinutes` implementation in `utils/index.ts`.
2025-04-11 12:01:15 +02:00
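The rounding this commit describes is plain ceiling arithmetic on millisecond timestamps: push the value up to the next N-minute boundary. The real helper lives in the frontend's utils/index.ts; the shell version below only illustrates the arithmetic, and the function name is made up for the example.

# Round a millisecond timestamp up to the next N-minute boundary.
round_to_next_minutes() {
  local ts_ms=$1 minutes=$2
  local step=$((minutes * 60 * 1000))
  echo $(( (ts_ms + step - 1) / step * step ))
}

# Example: align "now" to a 15-minute interval, as LAST_24_HOURS does after this change.
round_to_next_minutes "$(( $(date +%s) * 1000 ))" 15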
Shekar Siri
aef94618f6 Revert "Increment frontend chart version"
This reverts commit 2a330318c7.
2025-04-11 11:03:01 +02:00
GitHub Action
2a330318c7 Increment frontend chart version 2025-04-11 11:01:53 +02:00
Shekar Siri
6777d5ce2a feat(dashboard): set initial drill down period
Change default drill down period from LAST_7_DAYS to LAST_24_HOURS
and preserve current period when drilling down on chart click
2025-04-11 10:49:17 +02:00
rjshrjndrn
8a6f8fe91f chore(action): cloning specific tag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-10 15:45:50 +02:00
Mehdi Osman
7b078fed4c
Increment frontend chart version (#3278)
Co-authored-by: GitHub Action <action@github.com>
2025-04-07 15:24:32 +02:00
Andrey Babushkin
894d4c84b3
Patch assist canvas (#3277)
* resolved conflict

* removed comments
2025-04-07 15:13:36 +02:00
Alexander
46390a3ba9
feat(assist-server): added the github action (#3275) 2025-04-07 10:43:48 +02:00
rjshrjndrn
621667f5ce ci(action): Build and patch github tags
feat(workflow): update commit timestamp for patching

Add a step to set the commit timestamp of the HEAD commit to be 1
second newer than the oldest of the last 3 commits. This ensures
proper chronological order while preserving the commit content.

- Fetch deeper history to access commit history
- Get oldest timestamp from recent commits
- Set new commit date with BSD-compatible date command
- Verify timestamp change with git log

The workflow was previously checking out 'main' branch with a
comment indicating it needed to be fixed. This change makes it
properly checkout the tag specified by the workflow input.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-04 16:09:05 +02:00
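The timestamp step this message describes appears in full in the patch-build-old.yaml workflow added below; stripped of the workflow plumbing, it comes down to a few git, shell and perl operations:

# Oldest author timestamp among the last three commits, in epoch seconds.
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)

# One second newer than that, converted to a date string git accepts.
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)

# Re-date HEAD without changing its content, then verify.
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
git log -1 --pretty=format:"Commit now dated: %cD"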
rjshrjndrn
a72f476f1c chore(ci): tag patching
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-04-04 13:15:56 +02:00
Mehdi Osman
623946ce4e
Increment assist chart version (#3267)
Co-authored-by: GitHub Action <action@github.com>
2025-04-03 13:29:02 -04:00
Mehdi Osman
2d099214fc
Increment frontend chart version (#3266)
Co-authored-by: GitHub Action <action@github.com>
2025-04-03 18:27:05 +02:00
Andrey Babushkin
b0e7054f89
Assist patch canvas (#3265)
* add agent info to assist and tracker

* removed AGENTS_CONNECTED event
2025-04-03 18:22:08 +02:00
Mehdi Osman
a9097270af
Increment chalice chart version (#3260)
Co-authored-by: GitHub Action <action@github.com>
2025-04-02 16:43:46 +02:00
Alexander
5d514ddaf2
feat(chalice): added for_spot=True for authenticate_sso (#3259) 2025-04-02 16:35:19 +02:00
Mehdi Osman
43688bb03b
Increment assist chart version (#3256)
Co-authored-by: GitHub Action <action@github.com>
2025-04-01 16:04:41 +02:00
Mehdi Osman
e050cee7bb
Increment frontend chart version (#3255)
Co-authored-by: GitHub Action <action@github.com>
2025-03-31 18:19:52 +02:00
Andrey Babushkin
6b35df7125
pulled updates (#3254) 2025-03-31 18:13:51 +02:00
GitHub Action
8e099b6dc3 Increment frontend chart version 2025-03-31 17:25:58 +02:00
nick-delirium
c0a4734054 ui: fix double fetches for sessions 2025-03-31 17:19:33 +02:00
GitHub Action
7de1efb5fe Increment frontend chart version 2025-03-31 12:08:45 +02:00
nick-delirium
d4ff28ddbe ui: fix modules label 2025-03-31 11:54:13 +02:00
nick-delirium
b2256f72d0 ui: fix modules mapper 2025-03-31 11:48:14 +02:00
GitHub Action
a63bda1c79 Increment frontend chart version 2025-03-31 11:17:34 +02:00
nick-delirium
3a0176789e ui: filter keys 2025-03-31 10:34:02 +02:00
nick-delirium
f2b7271fca ui: add old devtool filters 2025-03-31 10:31:06 +02:00
GitHub Action
d50f89662b Increment frontend chart version 2025-03-28 21:37:59 +01:00
GitHub Action
35051d201c Increment assist chart version 2025-03-28 21:37:59 +01:00
rjshrjndrn
214be95ecc fix(init): remove duplicate clone
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-03-28 21:25:24 +01:00
Delirium
dbc142c114
UI patches (28.03) (#3231)
* ui: force getting url for location in tabmanagers

* Assist add turn servers (#3229)

* fixed conflicts

* add offers

* add config to sicket query

* add config to sicket query

* add config init

* removed console logs

* removed wrong updates

* fixed conflicts

* add offers

* add config to sicket query

* add config to sicket query

* add config init

* removed console logs

* removed wrong updates

* ui: fix chat draggable, fix default params

---------

Co-authored-by: nick-delirium <nikita@openreplay.com>

* ui: fix spritemap generation for assist sessions

* ui: fix yarnlock

* fix errors

* updated widget link

* resolved conflicts

* updated widget url

---------

Co-authored-by: Andrey Babushkin <55714097+reyand43@users.noreply.github.com>
Co-authored-by: Андрей Бабушкин <andreybabushkin2000@gmail.com>
2025-03-28 17:32:12 +01:00
GitHub Action
443f5e8f08 Increment frontend chart version 2025-03-27 12:36:54 +01:00
Shekar Siri
9f693f220d refactor(auth): separate SSO support from enterprise edition
Add dedicated isSSOSupported property to correctly identify when SSO
authentication is available, properly handling the 'msaas' edition
case separately from enterprise edition checks. This fixes SSO
visibility in the login interface.
2025-03-27 12:28:10 +01:00
GitHub Action
5ab30380b0 Increment chalice chart version 2025-03-26 17:48:08 +01:00
Taha Yassine Kraiem
fc86555644 refactor(chalice): changed user-journey 2025-03-26 17:18:17 +01:00
GitHub Action
2a3c611a27 Increment frontend chart version 2025-03-26 16:48:29 +01:00
Delirium
1d6fb0ae9e ui: shrink icons when no space, adjust player area for events export … (#3217)
* ui: shrink icons when no space, adjust player area for events export panel, fix panel size

* ui: rm log
2025-03-26 16:38:48 +01:00
GitHub Action
bef91a6136 Increment frontend chart version 2025-03-25 18:15:34 +01:00
Shekar Siri
1e2bd19d32 fix(dashboard): update filter condition in MetricsList
Change the filter type comparison from checking against 'all' to
checking against an empty string. This ensures proper filtering
behavior when filtering metrics in the dashboard component.
2025-03-25 18:10:13 +01:00
179 changed files with 4787 additions and 2154 deletions

.github/workflows/assist-server-ee.yaml (new file, 122 lines)

@@ -0,0 +1,122 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
required: false
default: "false"
push:
branches:
- dev
paths:
- "ee/assist-server/**"
name: Build and Deploy Assist-Server EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- uses: ./.github/composite-actions/update-keys
with:
assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
assist_key: ${{ secrets.ASSIST_KEY }}
domain_name: ${{ secrets.EE_DOMAIN_NAME }}
jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
jwt_secret: ${{ secrets.EE_JWT_SECRET }}
jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
license_key: ${{ secrets.EE_LICENSE_KEY }}
minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
pg_password: ${{ secrets.EE_PG_PASSWORD }}
registry_url: ${{ secrets.OSS_REGISTRY_URL }}
name: Update Keys
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing Assist-Server image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd assist-server
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("assist-server")
for image in ${images[*]};do
./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("assist-server")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We've to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
pwd
cd scripts/helmcharts/
# Update changed image tag
sed -i "/assist-server/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mkdir -p /tmp/charts
mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
rm -rf openreplay/charts/*
mv /tmp/charts/* openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

.github/workflows/patch-build-old.yaml (new file, 189 lines)

@@ -0,0 +1,189 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on:
workflow_dispatch:
inputs:
services:
description: 'Comma-separated names of the services to build (in lowercase).'
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to update.'
required: true
type: string
branch:
description: 'Branch to build patches from. Make sure the branch is up to date with the tag; otherwise commits will be missing.'
required: true
type: string
name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
jobs:
deploy:
name: Build Patch from old tag
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 4
ref: ${{ github.event.inputs.tag }}
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Create backup tag with timestamp
run: |
set -e # Exit immediately if a command exits with a non-zero status
TIMESTAMP=$(date +%Y%m%d%H%M%S)
BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
echo "Created backup tag: $BACKUP_TAG"
# Get the oldest commit date from the last 3 commits in raw format
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
# Add 1 second to the timestamp
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
- name: Setup yq
uses: mikefarah/yq@master
# Configure AWS credentials for the first registry
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
id: login-ecr-arm
run: |
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
- name: Build
id: build-image
env:
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout $INPUT_TAG
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
done
- name: Change commit timestamp
run: |
# Convert the timestamp to a date format git can understand
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
echo "Setting commit date to: $NEW_DATE"
# Amend the commit with the new date
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
# Verify the change
git log -1 --pretty=format:"Commit now dated: %cD"
# git tag and push
git tag $INPUT_TAG -f
git push origin $INPUT_TAG -f
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
# MSAAS_REPO_FOLDER: /tmp/msaas
# with:
# limit-access-to-actor: true


@@ -2,7 +2,6 @@
on:
workflow_dispatch:
description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
inputs:
services:
description: 'Comma separated names of services to build(in small letters).'
@@ -20,12 +19,20 @@ jobs:
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 1
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Rebase with main branch, to make sure the code has latest main changes
if: github.ref != 'refs/heads/main'
run: |
git pull --rebase origin main
git remote -v
git config --global user.email "action@github.com"
git config --global user.name "GitHub Action"
git config --global rebase.autoStash true
git fetch origin main:main
git rebase main
git log -3
- name: Downloading yq
run: |
@@ -48,6 +55,8 @@ jobs:
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
@@ -65,78 +74,168 @@ jobs:
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
SERVICES_INPUT: ${{ github.event.inputs.services }}
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
#!/bin/bash
set -euo pipefail
# Configuration
readonly WORKING_DIR=$(pwd)
readonly BUILD_SCRIPT_NAME="build.sh"
readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
# Initialize git configuration
setup_git() {
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b "$BRANCH_NAME"
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
# Get and increment image version
image_version() {
local service=$1
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
local current_version new_version
current_version=$(yq eval '.AppVersion' "$chart_path")
new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
echo "$new_version"
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
# Clone MSAAS repository if not exists
clone_msaas() {
if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
cd "$MSAAS_REPO_FOLDER"
cd openreplay && git fetch origin && git checkout main
git log -1
cd "$MSAAS_REPO_FOLDER"
bash git-init.sh
git checkout
fi
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
git push --set-upstream origin $BRANCH_NAME
done
# Build managed services
build_managed() {
local service=$1
local version=$2
echo "Building managed service: $service"
clone_msaas
if [[ $service == 'chalice' ]]; then
cd "$MSAAS_REPO_FOLDER/openreplay/api"
else
cd "$MSAAS_REPO_FOLDER/openreplay/$service"
fi
local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
echo "Executing: $build_cmd"
if ! eval "$build_cmd" 2>&1; then
echo "Build failed for $service"
exit 1
fi
}
# Build service with given arguments
build_service() {
local service=$1
local version=$2
local build_args=$3
local build_script=${4:-$BUILD_SCRIPT_NAME}
local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
echo "Executing: $command"
eval "$command"
}
# Update chart version and commit changes
update_chart_version() {
local service=$1
local version=$2
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
# Ensure we're in the original working directory/repository
cd "$WORKING_DIR"
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
git add "$chart_path"
git commit -m "Increment $service chart version to $version"
git push --set-upstream origin "$BRANCH_NAME"
cd -
}
# Main execution
main() {
setup_git
# Get backend services list
ls backend/cmd >"$BACKEND_SERVICES_FILE"
# Parse services input (fix for GitHub Actions syntax)
echo "Services: ${SERVICES_INPUT:-$1}"
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
# Process each service
for service in "${services[@]}"; do
echo "Processing service: $service"
cd "$WORKING_DIR"
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
# Determine build configuration based on service type
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
# Backend service
cd backend
foss_build_args="nil $service"
ee_build_args="ee $service"
else
# Non-backend service
case "$service" in
chalice | alerts | crons)
cd "$WORKING_DIR/api"
;;
*)
cd "$service"
;;
esac
# Special build scripts for alerts/crons
if [[ $service == 'alerts' || $service == 'crons' ]]; then
build_script="build_${service}.sh"
fi
ee_build_args="ee"
fi
# Get version and build
local version
version=$(image_version "$service")
# Build FOSS and EE versions
build_service "$service" "$version" "$foss_build_args"
build_service "$service" "${version}-ee" "$ee_build_args"
# Build managed version for specific services
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
echo "Nothing to build in managed for service $service"
else
build_managed "$service" "$version"
fi
# Update chart and commit
update_chart_version "$service" "$version"
done
cd "$WORKING_DIR"
# Cleanup
rm -f "$BACKEND_SERVICES_FILE"
}
echo "Working directory: $WORKING_DIR"
# Run main function with all arguments
main "$SERVICES_INPUT"
- name: Create Pull Request
uses: repo-sync/pull-request@v2
@@ -147,8 +246,7 @@ jobs:
pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
pr_body: |
This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
Once this PR is merged, To update the latest tag, run the following workflow.
https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml
Once this PR is merged, tag update job will run automatically.
# - name: Debug Job
# if: ${{ failure() }}


@@ -1,35 +1,42 @@
on:
workflow_dispatch:
description: "This workflow will build for patches for latest tag, and will Always use commit from main branch."
inputs:
services:
description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false"
required: true
default: "false"
name: Force Push tag with main branch HEAD
pull_request:
types: [closed]
branches:
- main
name: Release tag update --force
jobs:
deploy:
name: Build Patch from main
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Get latest release tag using GitHub API
id: get-latest-tag
run: |
LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
"https://api.github.com/repos/${{ github.repository }}/releases/latest" \
| jq -r .tag_name)
# Fallback to git command if API doesn't return a tag
if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
echo "Not found latest tag"
exit 100
fi
echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
echo "Latest tag: $LATEST_TAG"
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
- name: Push main branch to tag
run: |
git fetch --tags
git checkout main
git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# with:
# limit-access-to-actor: true
echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force


@@ -85,7 +85,8 @@ def __generic_query(typename, value_length=None):
ORDER BY value"""
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
return f"""SELECT DISTINCT ON(value,type) value, type
((SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
@@ -101,7 +102,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
LIMIT 5)) AS raw;"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@@ -326,7 +327,7 @@ def __search_metadata(project_id, value, key=None, source=None):
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))


@@ -338,14 +338,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
SELECT details.error_id as error_id,
name, message, users, total,
sessions, last_occurrence, first_occurrence, chart
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
FROM (SELECT error_id,
JSONExtractString(toString(`$properties`), 'name') AS name,
JSONExtractString(toString(`$properties`), 'message') AS message,
COUNT(DISTINCT user_id) AS users,
COUNT(DISTINCT events.session_id) AS sessions,
MAX(created_at) AS max_datetime,
MIN(created_at) AS min_datetime,
COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id'))
COUNT(DISTINCT error_id)
OVER() AS total
FROM {MAIN_EVENTS_TABLE} AS events
INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
@@ -357,7 +357,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
INNER JOIN (SELECT error_id,
toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
FROM {MAIN_EVENTS_TABLE}
@@ -366,7 +366,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
FROM (SELECT error_id,
gs.generate_series AS timestamp,
COUNT(DISTINCT session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs


@@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
cur.execute(
cur.mogrify(
"""SELECT username, token, url
FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",
FROM public.jira_cloud
WHERE user_id = %(user_id)s;""",
{"user_id": self._user_id})
)
data = helper.dict_to_camel_case(cur.fetchone())
@@ -95,10 +95,9 @@ class JIRAIntegration(base.BaseIntegration):
def add(self, username, token, url, obfuscate=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.jira_cloud(username, token, user_id,url)
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
RETURNING username, token, url;""",
cur.mogrify(""" \
INSERT INTO public.jira_cloud(username, token, user_id, url)
VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
{"user_id": self._user_id, "username": username,
"token": token, "url": url})
)
@@ -112,9 +111,10 @@ class JIRAIntegration(base.BaseIntegration):
def delete(self):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
DELETE FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",
cur.mogrify(""" \
DELETE
FROM public.jira_cloud
WHERE user_id = %(user_id)s;""",
{"user_id": self._user_id})
)
return {"state": "success"}
@@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
changes={
"username": data.username,
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
else self.integration.token,
else self.integration["token"],
"url": str(data.url)
},
obfuscate=True


@@ -85,6 +85,9 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
# compute avg_time_from_previous at the same level as sessions_count (this was removed in v1.22)
# if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
if not data.hide_excess:
data.hide_excess = True
data.rows = 50
sub_events = []
start_points_conditions = []
step_0_conditions = []


@@ -153,7 +153,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": []
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@@ -178,7 +178,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": []
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@@ -1108,8 +1108,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
is_negative_operator = sh.is_negation_operator(f.operator)
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
r_op = ""
if is_negative_operator:
r_op = sh.reverse_sql_operator(op)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL:
@@ -1118,6 +1122,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(json_condition(
"main", "$properties", 'status', op, f.value, e_k_f, True, True
@@ -1130,6 +1140,13 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,
@@ -1142,12 +1159,26 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(json_condition(
"main", "$properties", 'response_body', op, f.value, e_k_f
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
else:
logging.warning(f"undefined FETCH filter: {f.type}")
if not apply:
@@ -1395,17 +1426,30 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = []
for i, c in enumerate(extra_conditions):
if sh.isAny_opreator(c.operator):
if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
continue
e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
_extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
elif c.type == schemas.EventType.REQUEST_DETAILS.value:
for j, c_f in enumerate(c.filters):
if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
continue
e_k += f"_{j}"
op = sh.get_sql_operator(c_f.operator)
c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
full_args = {**full_args,
**sh.multi_values(c_f.value, value_key=e_k)}
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
_extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c_f.value, value_key=e_k))
else:
logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
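
A note on the hunk above: the new is_negative_operator branch handles FETCH sub-filters whose operator is a negation (IS_NOT, NOT_ON, NOT_CONTAINS in the schema). Instead of attaching the negative condition to the matched event row, the reversed (positive) form is recorded in events_conditions_not and evaluated against a correlated sub-event, so the query can later exclude sessions that do contain such a matching sub-event. A minimal Python sketch of the branching, with helper behaviour assumed from the diff rather than copied from the real sessions helpers:

# Sketch only: mirrors the branching above, not the actual shared helpers.
NEGATIVE_OPS = {"isNot", "notOn", "notContains"}   # stand-in operator strings

def get_sql_operator(op: str) -> str:
    return {"is": "=", "isNot": "!=", "contains": "ILIKE", "notContains": "NOT ILIKE"}.get(op, "=")

def reverse_sql_operator(op: str) -> str:
    # Turn a negative SQL operator back into its positive counterpart.
    return {"!=": "=", "NOT ILIKE": "ILIKE", "NOT IN": "IN"}.get(op, op)

def build_fetch_url_condition(operator: str, placeholder: str):
    op = get_sql_operator(operator)
    if operator in NEGATIVE_OPS:
        # Positive condition evaluated on a correlated sub-event ("sub"),
        # collected separately so the session is excluded when it matches.
        return "exclude", f"sub.`$properties`.url_path {reverse_sql_operator(op)} %({placeholder})s"
    # Ordinary condition evaluated on the matched event row ("main").
    return "include", f"main.`$properties`.url_path {op} %({placeholder})s"

print(build_fetch_url_condition("notContains", "e_0_fetch0"))
# ('exclude', 'sub.`$properties`.url_path ILIKE %(e_0_fetch0)s')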

View file

@ -148,7 +148,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": []
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -165,7 +165,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": []
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
c.value, value_key=e_k))
else:
logger.warning(f"unsupported extra_event type:${c.type}")
logger.warning(f"unsupported extra_event type: {c.type}")
if len(_extra_or_condition) > 0:
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
query_part = f"""\
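
The logging fix above replaces ${c.type} with {c.type}: inside a Python f-string, ${...} is not template-literal syntax, so the old message printed a stray dollar sign before the interpolated value. A two-line illustration:

c_type = "LOCATION"
print(f"unsupported extra_event type:${c_type}")   # unsupported extra_event type:$LOCATION
print(f"unsupported extra_event type: {c_type}")   # unsupported extra_event type: LOCATION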

View file

@ -4,37 +4,41 @@ import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
if isinstance(op, Enum):
op = op.value
return {
schemas.SearchEventOperator.IS: "=",
schemas.SearchEventOperator.ON: "=",
schemas.SearchEventOperator.ON_ANY: "IN",
schemas.SearchEventOperator.IS_NOT: "!=",
schemas.SearchEventOperator.NOT_ON: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
schemas.SearchEventOperator.IS.value: "=",
schemas.SearchEventOperator.ON.value: "=",
schemas.SearchEventOperator.ON_ANY.value: "IN",
schemas.SearchEventOperator.IS_NOT.value: "!=",
schemas.SearchEventOperator.NOT_ON.value: "!=",
schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
# Selector operators:
schemas.ClickEventExtraOperator.IS: "=",
schemas.ClickEventExtraOperator.IS_NOT: "!=",
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
schemas.ClickEventExtraOperator.IS.value: "=",
schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
schemas.MathOperator.GREATER: ">",
schemas.MathOperator.GREATER_EQ: ">=",
schemas.MathOperator.LESS: "<",
schemas.MathOperator.LESS_EQ: "<=",
schemas.MathOperator.GREATER.value: ">",
schemas.MathOperator.GREATER_EQ.value: ">=",
schemas.MathOperator.LESS.value: "<",
schemas.MathOperator.LESS_EQ.value: "<=",
}.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.IS_NOT,
schemas.SearchEventOperator.NOT_ON,
schemas.SearchEventOperator.NOT_CONTAINS,
schemas.ClickEventExtraOperator.IS_NOT,
schemas.ClickEventExtraOperator.NOT_CONTAINS]
if isinstance(op, Enum):
op = op.value
return op in [schemas.SearchEventOperator.IS_NOT.value,
schemas.SearchEventOperator.NOT_ON.value,
schemas.SearchEventOperator.NOT_CONTAINS.value,
schemas.ClickEventExtraOperator.IS_NOT.value,
schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
def reverse_sql_operator(op):
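
A note on the operator-map change above: get_sql_operator first normalizes op to op.value when it receives an Enum, so the lookup table must be keyed by the plain string values; with Enum members as keys, the trailing .get(op, "=") would fall back to "=" for every normalized operator, since a plain Enum member does not compare equal to its value (whether the schema enums are str-backed is not shown here, but keying by .value works either way). is_negation_operator gets the same normalization for the same reason. A small illustration with a hypothetical stand-in enum:

from enum import Enum

class MathOperator(Enum):              # hypothetical stand-in, not schemas.MathOperator
    GREATER = ">"

enum_keyed = {MathOperator.GREATER: ">"}
value_keyed = {MathOperator.GREATER.value: ">"}

op = MathOperator.GREATER
if isinstance(op, Enum):
    op = op.value                      # op is now the plain string ">"

print(enum_keyed.get(op, "="))         # = (miss: a plain str never equals a plain Enum member)
print(value_keyed.get(op, "="))        # > (hit: keys and lookup value are both plain strings)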

View file

@ -960,36 +960,6 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
return self
# We don't need this as the UI is expecting filters to override the full series' filters
# @model_validator(mode="after")
# def __merge_out_filters_with_series(self):
# for f in self.filters:
# for s in self.series:
# found = False
#
# if f.is_event:
# sub = s.filter.events
# else:
# sub = s.filter.filters
#
# for e in sub:
# if f.type == e.type and f.operator == e.operator:
# found = True
# if f.is_event:
# # If extra event: append value
# for v in f.value:
# if v not in e.value:
# e.value.append(v)
# else:
# # If extra filter: override value
# e.value = f.value
# if not found:
# sub.append(f)
#
# self.filters = []
#
# return self
# UI is expecting filters to override the full series' filters
@model_validator(mode="after")
def __override_series_filters_with_outer_filters(self):
@ -1060,6 +1030,16 @@ class CardTable(__CardSchema):
values["metricValue"] = []
return values
@model_validator(mode="after")
def __enforce_AND_operator(self):
self.metric_of = MetricOfTable(self.metric_of)
if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
for s in self.series:
if s.filter is not None:
s.filter.events_order = SearchEventOrder.AND
return self
@model_validator(mode="after")
def __transform(self):
self.metric_of = MetricOfTable(self.metric_of)
@ -1135,7 +1115,7 @@ class CardPathAnalysis(__CardSchema):
view_type: MetricOtherViewType = Field(...)
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default_factory=list)
density: int = Field(default=4, ge=2, le=10)
rows: int = Field(default=3, ge=1, le=10)
rows: int = Field(default=5, ge=1, le=10)
start_type: Literal["start", "end"] = Field(default="start")
start_point: List[PathAnalysisSubFilterSchema] = Field(default_factory=list)
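
The new __enforce_AND_operator validator above forces events_order to AND on every series when a table card is grouped by visited URL or fetch requests, presumably so the event filters are combined conjunctively before aggregation. A simplified, hedged sketch of the same pydantic-v2 pattern (enum values and field shapes assumed, not copied from schemas.py):

from enum import Enum
from typing import List, Optional
from pydantic import BaseModel, Field, model_validator

class MetricOfTable(str, Enum):        # assumed values, for illustration only
    VISITED_URL = "location"
    FETCH = "fetch"
    USER_ID = "userId"

class SearchEventOrder(str, Enum):
    AND = "and"
    OR = "or"

class SeriesFilter(BaseModel):
    events_order: SearchEventOrder = SearchEventOrder.OR

class Series(BaseModel):
    filter: Optional[SeriesFilter] = None

class CardTable(BaseModel):
    metric_of: MetricOfTable
    series: List[Series] = Field(default_factory=list)

    @model_validator(mode="after")
    def enforce_and_operator(self):
        # Force conjunctive event ordering for URL/fetch tables.
        if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH):
            for s in self.series:
                if s.filter is not None:
                    s.filter.events_order = SearchEventOrder.AND
        return self

card = CardTable(metric_of="fetch", series=[Series(filter=SeriesFilter())])
print(card.series[0].filter.events_order)   # SearchEventOrder.AND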

View file

@ -19,14 +19,16 @@ const EVENTS_DEFINITION = {
}
};
EVENTS_DEFINITION.emit = {
NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED",
AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
WEBRTC_CONFIG: "WEBRTC_CONFIG",
};
const BASE_sessionInfo = {

View file

@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
};
async function getRoomData(io, roomID) {
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
const connected_sockets = await io.in(roomID).fetchSockets();
if (connected_sockets.length > 0) {
for (let socket of connected_sockets) {
@ -52,13 +52,19 @@ async function getRoomData(io, roomID) {
} else {
agentsCount++;
agentIDs.push(socket.id);
agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
if (socket.handshake.query.config !== undefined) {
config = socket.handshake.query.config;
}
}
}
} else {
tabsCount = -1;
agentsCount = -1;
agentInfos = [];
agentIDs = [];
}
return {tabsCount, agentsCount, tabIDs, agentIDs};
return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
}
function processNewSocket(socket) {
@ -78,7 +84,7 @@ async function onConnect(socket) {
IncreaseOnlineConnections(socket.handshake.query.identity);
const io = getServer();
const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);
if (socket.handshake.query.identity === IDENTITIES.session) {
// Check if a session with the same tabID is already connected; if so, refuse the new connection
@ -100,7 +106,9 @@ async function onConnect(socket) {
// Inform all connected agents about reconnected session
if (agentsCount > 0) {
logger.debug(`notifying new session about agent-existence`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
}
} else if (tabsCount <= 0) {
@ -118,7 +126,8 @@ async function onConnect(socket) {
// Stats
startAssist(socket, socket.handshake.query.agentID);
}
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
io.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, socket.handshake.query.config);
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, { ...socket.handshake.query.agentInfo });
}
// Set disconnect handler

View file

@ -2,11 +2,12 @@ package datasaver
import (
"context"
"encoding/json"
"openreplay/backend/pkg/db/types"
"openreplay/backend/internal/config/db"
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/logger"
. "openreplay/backend/pkg/messages"
queue "openreplay/backend/pkg/queue/types"
@ -50,10 +51,6 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
}
func (s *saverImpl) Handle(msg Message) {
if msg.TypeID() == MsgCustomEvent {
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
}
var (
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
session *sessions.Session
@ -69,6 +66,23 @@ func (s *saverImpl) Handle(msg Message) {
return
}
if msg.TypeID() == MsgCustomEvent {
m := msg.(*CustomEvent)
// Try to parse custom event payload as JSON and extract the or_timestamp field
type CustomEventPayload struct {
CustomTimestamp uint64 `json:"or_timestamp"`
}
customPayload := &CustomEventPayload{}
if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
if customPayload.CustomTimestamp >= session.Timestamp {
s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
msg.Meta().Timestamp = customPayload.CustomTimestamp
s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
}
}
defer s.Handle(types.WrapCustomEvent(m))
}
if IsMobileType(msg.TypeID()) {
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
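
The Go change above defers wrapping the CustomEvent until after the session row is loaded, so an or_timestamp supplied in the event payload can be validated against the session start before it overrides the message timestamp. The same decision logic, sketched in Python for brevity (field names taken from the diff; the surrounding saver types are Go-specific and omitted):

import json

def adjusted_timestamp(payload: str, original_ts: int, session_start_ts: int) -> int:
    # Accept an explicit or_timestamp only if it is not earlier than the
    # session start; otherwise keep the original message timestamp.
    try:
        custom = json.loads(payload)
    except (TypeError, ValueError):
        return original_ts
    or_ts = custom.get("or_timestamp")
    if isinstance(or_ts, int) and or_ts >= session_start_ts:
        return or_ts
    return original_ts

print(adjusted_timestamp('{"or_timestamp": 1730000005000}', 1730000000000, 1730000001000))  # 1730000005000
print(adjusted_timestamp('{"or_timestamp": 5}', 1730000000000, 1730000001000))              # 1730000000000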

View file

@ -86,7 +86,8 @@ def __generic_query(typename, value_length=None):
ORDER BY value"""
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
return f"""SELECT DISTINCT ON(value, type) value, type
FROM ((SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
@ -102,7 +103,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
LIMIT 5)) AS raw;"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@ -257,7 +258,7 @@ def __search_metadata(project_id, value, key=None, source=None):
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with ch_client.ClickHouseClient() as cur:
query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE
query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})

View file

@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data):
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False)
ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s")
ch_basic_query.append("error_id = %(error_id)s")
with ch_client.ClickHouseClient() as ch:
data["startDate24"] = TimeUTC.now(-1)
@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data):
"error_id": error_id}
main_ch_query = f"""\
WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id,
WITH pre_processed AS (SELECT error_id,
toString(`$properties`.name) AS name,
toString(`$properties`.message) AS message,
session_id,
@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000)
AND toString(`$properties`.error_id) = %(error_id)s
AND error_id = %(error_id)s
GROUP BY timestamp
ORDER BY timestamp) AS chart_details
) AS chart_details24 ON TRUE
@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000)
AND toString(`$properties`.error_id) = %(error_id)s
AND error_id = %(error_id)s
GROUP BY timestamp
ORDER BY timestamp) AS chart_details
) AS chart_details30 ON TRUE;"""

View file

@ -927,12 +927,12 @@ def authenticate_sso(email: str, internal_id: str):
aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti),
"refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
"spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE),
iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE, for_spot=True),
"spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
tenant_id=r['tenantId'],
iat=j_r.spot_jwt_refresh_iat,
aud=spot.AUDIENCE,
jwt_jti=j_r.spot_jwt_refresh_jti),
jwt_jti=j_r.spot_jwt_refresh_jti, for_spot=True),
"spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int)
}
return response
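
The fix above passes for_spot=True when minting the Spot access and refresh tokens during SSO login, presumably mirroring what the regular login path already does; the exact claim generate_jwt derives from that flag is not visible in this diff. A rough, hedged illustration of keeping the two token families distinguishable, using PyJWT with hypothetical claim and audience names:

import time
import jwt  # PyJWT

SECRET = "change-me"

def make_token(user_id: int, tenant_id: int, aud: str, for_spot: bool, ttl_s: int = 3600) -> str:
    now = int(time.time())
    payload = {"sub": str(user_id), "tenantId": tenant_id, "aud": aud,
               "iat": now, "exp": now + ttl_s}
    if for_spot:
        payload["spot"] = True   # hypothetical marker claim for Spot-only tokens
    return jwt.encode(payload, SECRET, algorithm="HS256")

token = make_token(1, 1, aud="spot:openreplay", for_spot=True)
claims = jwt.decode(token, SECRET, algorithms=["HS256"], audience="spot:openreplay")
print(claims["spot"])  # True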

View file

@ -1,3 +1,16 @@
SELECT 1
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
SELECT 1
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
ALTER TABLE experimental.sessions
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
SET allow_experimental_json_type = 1;
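
A note on the guard statements at the top of this migration: throwIf raises a server-side exception if any remaining 'ios' or 'android' session row is scanned, so the ALTER that narrows platform to Enum8('web','mobile') never runs against data it would break. A hedged sketch of driving the same sequence from Python with clickhouse-driver (connection settings hypothetical):

from clickhouse_driver import Client  # pip install clickhouse-driver

GUARDS = [
    "SELECT 1 FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found') "
    "FROM experimental.sessions) AS raw LIMIT 1",
    "SELECT 1 FROM (SELECT throwIf(platform = 'android', 'Android sessions found') "
    "FROM experimental.sessions) AS raw LIMIT 1",
]
ALTER = ("ALTER TABLE experimental.sessions "
         "MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web'")

client = Client(host="localhost")      # hypothetical connection settings
try:
    for guard in GUARDS:
        client.execute(guard)          # raises if throwIf fires on any row
except Exception as exc:
    raise SystemExit(f"migration aborted: {exc}")
client.execute(ALTER)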
@ -151,8 +164,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _timestamp + INTERVAL 1 MONTH ,
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested,
-- according to a specific event_name and optional properties

View file

@ -9,8 +9,7 @@ CREATE TABLE IF NOT EXISTS experimental.autocomplete
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
ORDER BY (project_id, type, value);
CREATE TABLE IF NOT EXISTS experimental.events
(
@ -87,8 +86,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
ORDER BY (project_id, datetime, event_type, session_id, message_id);
@ -108,7 +106,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String),
user_state LowCardinality(String),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
datetime DateTime,
timezone LowCardinality(Nullable(String)),
duration UInt32,
@ -140,7 +138,6 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 3 MONTH
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
@ -152,8 +149,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
sign Int8
) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
ORDER BY (project_id, user_id, session_id);
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
@ -163,8 +159,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
ORDER BY (project_id, user_id, session_id);
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
@ -174,8 +169,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;
ORDER BY (project_id, user_id, error_id);
CREATE TABLE IF NOT EXISTS experimental.issues
(
@ -188,8 +182,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type)
TTL _timestamp + INTERVAL 3 MONTH;
ORDER BY (project_id, issue_id, type);
@ -292,8 +285,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions_feature_flags
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id)
TTL datetime + INTERVAL 3 MONTH;
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id);
CREATE TABLE IF NOT EXISTS experimental.ios_events
(
@ -329,8 +321,7 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
ORDER BY (project_id, datetime, event_type, session_id, message_id);
SET allow_experimental_json_type = 1;
@ -484,8 +475,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _timestamp + INTERVAL 1 MONTH ,
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested,
-- according to a specific event_name and optional properties

View file

@ -1,5 +1,4 @@
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import React, { Suspense, lazy } from 'react';
import { Redirect, Route, Switch } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
@ -10,7 +9,7 @@ import { Loader } from 'UI';
import APIClient from './api_client';
import * as routes from './routes';
import { debounce } from '@/utils';
import { debounceCall } from '@/utils';
const components: any = {
SessionPure: lazy(() => import('Components/Session/Session')),
@ -88,7 +87,6 @@ const ASSIST_PATH = routes.assist();
const LIVE_SESSION_PATH = routes.liveSession();
const MULTIVIEW_PATH = routes.multiview();
const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
const ASSIST_STATS_PATH = routes.assistStats();
const USABILITY_TESTING_PATH = routes.usabilityTesting();
const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit();
@ -99,7 +97,6 @@ const SPOT_PATH = routes.spot();
const SCOPE_SETUP = routes.scopeSetup();
const HIGHLIGHTS_PATH = routes.highlights();
let debounceSearch: any = () => {};
function PrivateRoutes() {
const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
@ -124,14 +121,10 @@ function PrivateRoutes() {
}
}, [siteId]);
React.useEffect(() => {
debounceSearch = debounce(() => searchStore.fetchSessions(), 500);
}, []);
React.useEffect(() => {
if (!searchStore.urlParsed) return;
debounceSearch();
}, [searchStore.instance.filters, searchStore.instance.eventsOrder]);
debounceCall(() => searchStore.fetchSessions(true), 250)()
}, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
return (
<Suspense fallback={<Loader loading className="flex-1" />}>

View file

@ -1,7 +1,7 @@
import React, { useState, useEffect } from 'react';
import cn from 'classnames';
import Counter from 'App/components/shared/SessionItem/Counter';
import Draggable from 'react-draggable';
import { useDraggable } from '@neodrag/react';
import type { LocalStream } from 'Player';
import { PlayerContext } from 'App/components/Session/playerContext';
import ChatControls from '../ChatControls/ChatControls';
@ -25,6 +25,8 @@ function ChatWindow({
isPrestart,
}: Props) {
const { t } = useTranslation();
const dragRef = React.useRef<HTMLDivElement>(null);
useDraggable(dragRef, { bounds: 'body', defaultPosition: { x: 50, y: 200 } })
const { player } = React.useContext(PlayerContext);
const { toggleVideoLocalStream } = player.assistManager;
@ -39,11 +41,7 @@ function ChatWindow({
}, [localVideoEnabled]);
return (
<Draggable
handle=".handle"
bounds="body"
defaultPosition={{ x: 50, y: 200 }}
>
<div ref={dragRef}>
<div
className={cn(stl.wrapper, 'fixed radius bg-white shadow-xl mt-16')}
style={{ width: '280px' }}
@ -102,7 +100,7 @@ function ChatWindow({
isPrestart={isPrestart}
/>
</div>
</Draggable>
</div>
);
}

View file

@ -82,7 +82,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
{ stream: MediaStream; isAgent: boolean }[] | null
>([]);
const [localStream, setLocalStream] = useState<LocalStream | null>(null);
const [callObject, setCallObject] = useState<{ end: () => void } | null>(
const [callObject, setCallObject] = useState<{ end: () => void } | null | undefined>(
null,
);
@ -135,6 +135,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
}, [peerConnectionStatus]);
const addIncomeStream = (stream: MediaStream, isAgent: boolean) => {
if (!stream.active) return;
setIncomeStream((oldState) => {
if (oldState === null) return [{ stream, isAgent }];
if (
@ -149,13 +150,8 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
});
};
const removeIncomeStream = (stream: MediaStream) => {
setIncomeStream((prevState) => {
if (!prevState) return [];
return prevState.filter(
(existingStream) => existingStream.stream.id !== stream.id,
);
});
const removeIncomeStream = () => {
setIncomeStream([]);
};
function onReject() {
@ -181,7 +177,12 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream(lStream.stream);
removeIncomeStream();
},
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream();
},
onReject,
onError,

View file

@ -34,43 +34,40 @@ function VideoContainer({
}
const iid = setInterval(() => {
const track = stream.getVideoTracks()[0];
const settings = track?.getSettings();
const isDummyVideoTrack = settings
? settings.width === 2 ||
settings.frameRate === 0 ||
(!settings.frameRate && !settings.width)
: true;
const shouldBeEnabled = track.enabled && !isDummyVideoTrack;
if (isEnabled !== shouldBeEnabled) {
setEnabled(shouldBeEnabled);
setRemoteEnabled?.(shouldBeEnabled);
if (track) {
if (!track.enabled) {
setEnabled(false);
setRemoteEnabled?.(false);
} else {
setEnabled(true);
setRemoteEnabled?.(true);
}
} else {
setEnabled(false);
setRemoteEnabled?.(false);
}
}, 500);
return () => clearInterval(iid);
}, [stream, isEnabled]);
}, [stream]);
return (
<div
className="flex-1"
style={{
display: isEnabled ? undefined : 'none',
width: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px !important',
border: '1px solid grey',
transform: local ? 'scaleX(-1)' : undefined,
display: isEnabled ? 'block' : 'none',
}}
>
<video autoPlay ref={ref} muted={muted} style={{ height }} />
{isAgent ? (
<div
style={{
position: 'absolute',
}}
>
{t('Agent')}
</div>
) : null}
<video
autoPlay
ref={ref}
muted={muted}
style={{ height }}
/>
</div>
);
}

View file

@ -24,7 +24,7 @@ function ModuleCard(props: Props) {
<Switch
size="small"
checked={!module.isEnabled}
title={module.isEnabled ? 'Enabled' : 'Disabled'}
title={!module.isEnabled ? 'Enabled' : 'Disabled'}
onChange={() => props.onToggle(module)}
/>
</div>

View file

@ -40,11 +40,12 @@ function Modules() {
};
useEffect(() => {
list(t).forEach((module) => {
const moduleList = list(t)
moduleList.forEach((module) => {
module.isEnabled = modules.includes(module.key);
});
setModulesState(
list(t).filter(
moduleList.filter(
(module) => !module.hidden && (!module.enterprise || isEnterprise),
),
);

View file

@ -6,6 +6,7 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying';
import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone';
import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility';
import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings';
import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode';
import DebugLog from './DebugLog';
import { useTranslation } from 'react-i18next';
@ -35,6 +36,7 @@ function SessionsListingSettings() {
<div className="flex flex-col gap-2">
<MouseTrailSettings />
<DebugLog />
<VirtualModeSettings />
</div>
</div>
</div>

View file

@ -6,6 +6,7 @@ import CardSessionsByList from 'Components/Dashboard/Widgets/CardSessionsByList'
import { useModal } from 'Components/ModalContext';
import Widget from '@/mstore/types/widget';
import { useTranslation } from 'react-i18next';
import { FilterKey } from 'Types/filter/filterType';
interface Props {
metric?: any;
@ -35,20 +36,20 @@ function SessionsBy(props: Props) {
...filtersMap[metric.metricOf],
value: [data.name],
type: filtersMap[metric.metricOf].key,
filters: filtersMap[metric.metricOf].filters?.map((f: any) => {
const {
key,
operatorOptions,
category,
icon,
label,
options,
...cleaned
} = f;
return { ...cleaned, type: f.key, value: [] };
}),
filters: [],
};
if (metric.metricOf === FilterKey.FETCH) {
baseFilter.filters = [
{
key: FilterKey.FETCH_URL,
operator: 'is',
value: [data.name],
type: FilterKey.FETCH_URL,
}
];
}
const {
key,
operatorOptions,

View file

@ -23,6 +23,7 @@ function BottomButtons({
<Button
loading={loading}
type="primary"
htmlType="submit"
disabled={loading || !instance.validate()}
id="submit-button"
>

View file

@ -43,7 +43,7 @@ function ClickMapRagePicker() {
<Checkbox onChange={onToggle} label={t('Include rage clicks')} />
<Button size="small" onClick={refreshHeatmapSession}>
{t('Get new session')}
{t('Get new image')}
</Button>
</div>
);

View file

@ -64,6 +64,7 @@ function DashboardView(props: Props) {
};
useEffect(() => {
dashboardStore.resetPeriod();
if (queryParams.has('modal')) {
onAddWidgets();
trimQuery();

View file

@ -117,8 +117,6 @@ const ListView: React.FC<Props> = ({
if (disableSelection) {
const path = withSiteId(`/metrics/${metric.metricId}`, siteId);
history.push(path);
} else {
toggleSelection?.(metric.metricId);
}
};

View file

@ -68,7 +68,7 @@ function MetricsList({
}, [metricStore]);
const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== 'all';
const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== '';
const searchImageDimensions = { width: 60, height: 'auto' };
const defaultImageDimensions = { width: 600, height: 'auto' };

View file

@ -181,9 +181,10 @@ function WidgetChart(props: Props) {
}
prevMetricRef.current = _metric;
const timestmaps = drillDownPeriod.toTimestamps();
const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
const payload = isSaved
? { ...metricParams }
: { ...params, ...timestmaps, ..._metric.toJson() };
? { ...metricParams, density }
: { ...params, ...timestmaps, ..._metric.toJson(), density };
debounceRequest(
_metric,
payload,

View file

@ -55,7 +55,7 @@ function RangeGranularity({
}
const PAST_24_HR_MS = 24 * 60 * 60 * 1000;
function calculateGranularities(periodDurationMs: number) {
export function calculateGranularities(periodDurationMs: number) {
const granularities = [
{ label: 'Hourly', durationMs: 60 * 60 * 1000 },
{ label: 'Daily', durationMs: 24 * 60 * 60 * 1000 },

View file

@ -1,376 +1,395 @@
import React, { useEffect, useState } from 'react';
import { NoContent, Loader, Pagination } from 'UI';
import { Button, Tag, Tooltip, Dropdown, message } from 'antd';
import { UndoOutlined, DownOutlined } from '@ant-design/icons';
import React, {useEffect, useState} from 'react';
import {NoContent, Loader, Pagination} from 'UI';
import {Button, Tag, Tooltip, Dropdown, message} from 'antd';
import {UndoOutlined, DownOutlined} from '@ant-design/icons';
import cn from 'classnames';
import { useStore } from 'App/mstore';
import {useStore} from 'App/mstore';
import SessionItem from 'Shared/SessionItem';
import { observer } from 'mobx-react-lite';
import { DateTime } from 'luxon';
import { debounce, numberWithCommas } from 'App/utils';
import {observer} from 'mobx-react-lite';
import {DateTime} from 'luxon';
import {debounce, numberWithCommas} from 'App/utils';
import useIsMounted from 'App/hooks/useIsMounted';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card';
import { useTranslation } from 'react-i18next';
import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG';
import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card';
import {useTranslation} from 'react-i18next';
interface Props {
className?: string;
className?: string;
}
function WidgetSessions(props: Props) {
const { t } = useTranslation();
const listRef = React.useRef<HTMLDivElement>(null);
const { className = '' } = props;
const [activeSeries, setActiveSeries] = useState('all');
const [data, setData] = useState<any>([]);
const isMounted = useIsMounted();
const [loading, setLoading] = useState(false);
// all filtering done through series now
const filteredSessions = getListSessionsBySeries(data, 'all');
const { dashboardStore, metricStore, sessionStore, customFieldStore } =
useStore();
const focusedSeries = metricStore.focusedSeriesName;
const filter = dashboardStore.drillDownFilter;
const widget = metricStore.instance;
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const [seriesOptions, setSeriesOptions] = useState([
{ label: t('All'), value: 'all' },
]);
const hasFilters =
filter.filters.length > 0 ||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
const metaList = customFieldStore.list.map((i: any) => i.key);
const {t} = useTranslation();
const listRef = React.useRef<HTMLDivElement>(null);
const {className = ''} = props;
const [activeSeries, setActiveSeries] = useState('all');
const [data, setData] = useState<any>([]);
const isMounted = useIsMounted();
const [loading, setLoading] = useState(false);
// all filtering done through series now
const filteredSessions = getListSessionsBySeries(data, 'all');
const {dashboardStore, metricStore, sessionStore, customFieldStore} =
useStore();
const focusedSeries = metricStore.focusedSeriesName;
const filter = dashboardStore.drillDownFilter;
const widget = metricStore.instance;
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const [seriesOptions, setSeriesOptions] = useState([
{label: t('All'), value: 'all'},
]);
const hasFilters =
filter.filters.length > 0 ||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
const metaList = customFieldStore.list.map((i: any) => i.key);
const seriesDropdownItems = seriesOptions.map((option) => ({
key: option.value,
label: (
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
),
}));
useEffect(() => {
if (!widget.series) return;
const seriesOptions = widget.series.map((item: any) => ({
label: item.name,
value: item.seriesId ?? item.name,
const seriesDropdownItems = seriesOptions.map((option) => ({
key: option.value,
label: (
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
),
}));
setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]);
}, [widget.series.length]);
const fetchSessions = (metricId: any, filter: any) => {
if (!isMounted()) return;
setLoading(true);
delete filter.eventsOrderSupport;
if (widget.metricType === FUNNEL) {
if (filter.series[0].filter.filters.length === 0) {
setLoading(false);
return setData([]);
}
}
useEffect(() => {
if (!widget.series) return;
const seriesOptions = widget.series.map((item: any) => ({
label: item.name,
value: item.seriesId ?? item.name,
}));
setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]);
}, [widget.series.length]);
widget
.fetchSessions(metricId, filter)
.then((res: any) => {
setData(res);
if (metricStore.drillDown) {
setTimeout(() => {
message.info(t('Sessions Refreshed!'));
listRef.current?.scrollIntoView({ behavior: 'smooth' });
metricStore.setDrillDown(false);
}, 0);
const fetchSessions = (metricId: any, filter: any) => {
if (!isMounted()) return;
if (widget.metricType === FUNNEL) {
if (filter.series[0].filter.filters.length === 0) {
setLoading(false);
return setData([]);
}
}
})
.finally(() => {
setLoading(false);
});
};
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
sessionStore.getSessions(customFilters).then((data) => {
setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]);
});
};
const debounceRequest: any = React.useCallback(
debounce(fetchSessions, 1000),
[],
);
const debounceClickMapSearch = React.useCallback(
debounce(fetchClickmapSessions, 1000),
[],
);
const depsString = JSON.stringify(widget.series);
const loadData = () => {
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
const clickFilter = {
value: [metricStore.clickMapSearch],
type: 'CLICK',
operator: 'onSelector',
isEvent: true,
// @ts-ignore
filters: [],
};
const timeRange = {
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
startDate: dashboardStore.drillDownPeriod.start,
endDate: dashboardStore.drillDownPeriod.end,
};
const customFilter = {
...filter,
...timeRange,
filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
setLoading(true);
const filterCopy = {...filter};
delete filterCopy.eventsOrderSupport;
try {
// Handle filters properly with null checks
if (filterCopy.filters && filterCopy.filters.length > 0) {
// Ensure the nested path exists before pushing
if (filterCopy.series?.[0]?.filter) {
if (!filterCopy.series[0].filter.filters) {
filterCopy.series[0].filter.filters = [];
}
filterCopy.series[0].filter.filters.push(...filterCopy.filters);
}
filterCopy.filters = [];
}
} catch (e) {
// do nothing
}
}
debounceRequest(widget.metricId, {
...filter,
series: seriesJson,
page: metricStore.sessionsPage,
limit: metricStore.sessionsPageSize,
});
}
};
useEffect(() => {
metricStore.updateKey('sessionsPage', 1);
loadData();
}, [
filter.startTimestamp,
filter.endTimestamp,
filter.filters,
depsString,
metricStore.clickMapSearch,
focusedSeries,
widget.startPoint,
widget.data.nodes,
metricStore.disabledSeries.length,
]);
useEffect(loadData, [metricStore.sessionsPage]);
useEffect(() => {
if (activeSeries === 'all') {
metricStore.setFocusedSeriesName(null);
} else {
metricStore.setFocusedSeriesName(
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
widget
.fetchSessions(metricId, filterCopy)
.then((res: any) => {
setData(res);
if (metricStore.drillDown) {
setTimeout(() => {
message.info(t('Sessions Refreshed!'));
listRef.current?.scrollIntoView({behavior: 'smooth'});
metricStore.setDrillDown(false);
}, 0);
}
})
.finally(() => {
setLoading(false);
});
};
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
sessionStore.getSessions(customFilters).then((data) => {
setData([{...data, seriesId: 1, seriesName: 'Clicks'}]);
});
};
const debounceRequest: any = React.useCallback(
debounce(fetchSessions, 1000),
[],
);
const debounceClickMapSearch = React.useCallback(
debounce(fetchClickmapSessions, 1000),
[],
);
const clearFilters = () => {
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
const depsString = JSON.stringify(widget.series);
return (
<div
className={cn(
className,
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
)}
>
<div className="flex items-center justify-between">
<div>
<div className="flex items-baseline gap-2">
<h2 className="text-xl">
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
</h2>
<div className="ml-2 color-gray-medium">
{metricStore.clickMapLabel
? `on "${metricStore.clickMapLabel}" `
: null}
{t('between')}{' '}
<span className="font-medium color-gray-darkest">
const loadData = () => {
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
const clickFilter = {
value: [metricStore.clickMapSearch],
type: 'CLICK',
operator: 'onSelector',
isEvent: true,
// @ts-ignore
filters: [],
};
const timeRange = {
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
startDate: dashboardStore.drillDownPeriod.start,
endDate: dashboardStore.drillDownPeriod.end,
};
const customFilter = {
...filter,
...timeRange,
filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes?.length
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
}
}
debounceRequest(widget.metricId, {
...filter,
series: seriesJson,
page: metricStore.sessionsPage,
limit: metricStore.sessionsPageSize,
});
}
};
useEffect(() => {
metricStore.updateKey('sessionsPage', 1);
loadData();
}, [
filter.startTimestamp,
filter.endTimestamp,
filter.filters,
depsString,
metricStore.clickMapSearch,
focusedSeries,
widget.startPoint,
widget.data.nodes,
metricStore.disabledSeries.length,
]);
useEffect(loadData, [metricStore.sessionsPage]);
useEffect(() => {
if (activeSeries === 'all') {
metricStore.setFocusedSeriesName(null);
} else {
metricStore.setFocusedSeriesName(
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
const clearFilters = () => {
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
return (
<div
className={cn(
className,
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
)}
>
<div className="flex items-center justify-between">
<div>
<div className="flex items-baseline gap-2">
<h2 className="text-xl">
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
</h2>
<div className="ml-2 color-gray-medium">
{metricStore.clickMapLabel
? `on "${metricStore.clickMapLabel}" `
: null}
{t('between')}{' '}
<span className="font-medium color-gray-darkest">
{startTime}
</span>{' '}
{t('and')}{' '}
<span className="font-medium color-gray-darkest">
{t('and')}{' '}
<span className="font-medium color-gray-darkest">
{endTime}
</span>{' '}
</div>
{hasFilters && (
<Tooltip title={t('Clear Drilldown')} placement="top">
<Button type="text" size="small" onClick={clearFilters}>
<UndoOutlined />
</Button>
</Tooltip>
)}
</div>
</div>
{hasFilters && (
<Tooltip title={t('Clear Drilldown')} placement="top">
<Button type="text" size="small" onClick={clearFilters}>
<UndoOutlined/>
</Button>
</Tooltip>
)}
</div>
{hasFilters && widget.metricType === 'table' && (
<div className="py-2">
<Tag
closable
onClose={clearFilters}
className="truncate max-w-44 rounded-lg"
>
{filterText}
</Tag>
</div>
)}
</div>
{hasFilters && widget.metricType === 'table' && (
<div className="py-2">
<Tag
closable
onClose={clearFilters}
className="truncate max-w-44 rounded-lg"
>
{filterText}
</Tag>
</div>
)}
</div>
<div className="flex items-center gap-4">
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
<div className="flex items-center ml-6">
<div className="flex items-center gap-4">
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
<div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium">
{t('Filter by Series')}
</span>
<Dropdown
menu={{
items: seriesDropdownItems,
selectable: true,
selectedKeys: [activeSeries],
}}
trigger={['click']}
>
<Button type="text" size="small">
{seriesOptions.find((option) => option.value === activeSeries)
?.label || t('Select Series')}
<DownOutlined />
</Button>
</Dropdown>
</div>
)}
</div>
</div>
<div className="mt-3">
<Loader loading={loading}>
<NoContent
title={
<div className="flex items-center justify-center flex-col">
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60} />
<div className="mt-4" />
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
<Dropdown
menu={{
items: seriesDropdownItems,
selectable: true,
selectedKeys: [activeSeries],
}}
trigger={['click']}
>
<Button type="text" size="small">
{seriesOptions.find((option) => option.value === activeSeries)
?.label || t('Select Series')}
<DownOutlined/>
</Button>
</Dropdown>
</div>
)}
</div>
</div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b" />
</React.Fragment>
))}
</div>
<div
className="flex items-center justify-between p-5"
ref={listRef}
>
<div>
{t('Showing')}{' '}
<span className="font-medium">
<div className="mt-3">
<Loader loading={loading}>
<NoContent
title={
<div className="flex items-center justify-center flex-col">
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60}/>
<div className="mt-4"/>
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
</div>
</div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b"/>
</React.Fragment>
))}
<div
className="flex items-center justify-between p-5"
ref={listRef}
>
<div>
{t('Showing')}{' '}
<span className="font-medium">
{(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize +
1}
metricStore.sessionsPageSize +
1}
</span>{' '}
{t('to')}{' '}
<span className="font-medium">
{t('to')}{' '}
<span className="font-medium">
{(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize +
filteredSessions.sessions.length}
metricStore.sessionsPageSize +
filteredSessions.sessions.length}
</span>{' '}
{t('of')}{' '}
<span className="font-medium">
{t('of')}{' '}
<span className="font-medium">
{numberWithCommas(filteredSessions.total)}
</span>{' '}
{t('sessions.')}
</div>
<Pagination
page={metricStore.sessionsPage}
total={filteredSessions.total}
onPageChange={(page: any) =>
metricStore.updateKey('sessionsPage', page)
}
limit={metricStore.sessionsPageSize}
debounceRequest={500}
/>
{t('sessions.')}
</div>
<Pagination
page={metricStore.sessionsPage}
total={filteredSessions.total}
onPageChange={(page: any) =>
metricStore.updateKey('sessionsPage', page)
}
limit={metricStore.sessionsPageSize}
debounceRequest={500}
/>
</div>
</NoContent>
</Loader>
</div>
</NoContent>
</Loader>
</div>
</div>
);
</div>
);
}
const getListSessionsBySeries = (data: any, seriesId: any) => {
const arr = data.reduce(
(arr: any, element: any) => {
if (seriesId === 'all') {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
arr.sessions.push(...sessions);
} else if (element.seriesId === seriesId) {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
const duplicates = element.sessions.length - sessions.length;
arr.sessions.push(...sessions);
arr.total = element.total - duplicates;
}
return arr;
},
{ sessions: [] },
);
arr.total =
seriesId === 'all'
? Math.max(...data.map((i: any) => i.total))
: data.find((i: any) => i.seriesId === seriesId).total;
return arr;
const arr = data.reduce(
(arr: any, element: any) => {
if (seriesId === 'all') {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
arr.sessions.push(...sessions);
} else if (element.seriesId === seriesId) {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
const duplicates = element.sessions.length - sessions.length;
arr.sessions.push(...sessions);
arr.total = element.total - duplicates;
}
return arr;
},
{sessions: []},
);
arr.total =
seriesId === 'all'
? Math.max(...data.map((i: any) => i.total))
: data.find((i: any) => i.seriesId === seriesId).total;
return arr;
};
export default observer(WidgetSessions);

View file

@ -92,6 +92,9 @@ function WidgetView({
filter: { filters: selectedCard.filters },
}),
];
} else if (selectedCard.cardType === TABLE) {
cardData.series = [new FilterSeries()];
cardData.series[0].filter.eventsOrder = 'and';
}
if (selectedCard.cardType === FUNNEL) {
cardData.series = [new FilterSeries()];

View file

@ -83,6 +83,7 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
});
const onChartClick = () => {
dashboardStore.setDrillDownPeriod(dashboardStore.period);
// if (!isWidget || isPredefined) return;
props.history.push(
withSiteId(

View file

@ -14,7 +14,7 @@ interface SSOLoginProps {
const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
const { userStore } = useStore();
const { t } = useTranslation();
const { isEnterprise } = userStore;
const { isSSOSupported } = userStore;
const getSSOLink = () =>
window !== window.top
@ -23,7 +23,7 @@ const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
const ssoLink = getSSOLink();
const ssoButtonText = `${t('Login with SSO')} ${authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
}`;
}`;
if (enforceSSO) {
return (
@ -47,7 +47,7 @@ const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
<Tooltip
title={
<div className="text-center">
{isEnterprise ? (
{isSSOSupported ? (
<span>
{t('SSO has not been configured.')}
<br />

View file

@ -8,7 +8,7 @@ import {
LikeFilled,
LikeOutlined,
} from '@ant-design/icons';
import { Tour, TourProps } from './.store/antd-virtual-7db13b4af6/package';
import { Tour, TourProps } from 'antd';
import { useTranslation } from 'react-i18next';
interface Props {

View file

@ -91,7 +91,7 @@ function PlayerBlockHeader(props: Props) {
)}
</div>
</div>
<div className="relative border-l" style={{ minWidth: '270px' }}>
<div className="relative border-l" style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}>
<Tabs
tabs={TABS}
active={activeTab}

View file

@ -61,7 +61,7 @@ function PlayerContent({
className="w-full"
style={
activeTab && !fullscreen
? { maxWidth: 'calc(100% - 270px)' }
? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
: undefined
}
>

View file

@ -42,7 +42,7 @@ function DropdownAudioPlayer({
return {
url: data.url,
timestamp: data.timestamp,
start: startTs,
start: Math.max(0, startTs),
};
}),
[audioEvents.length, sessionStart],

View file

@ -114,19 +114,17 @@ function PlayerBlockHeader(props: any) {
)}
{_metaList.length > 0 && (
<div className="h-full flex items-center px-2 gap-1">
<SessionMetaList
className=""
metaList={_metaList}
maxLength={2}
/>
</div>
<SessionMetaList
horizontal
metaList={_metaList}
maxLength={2}
/>
)}
</div>
</div>
<div
className="px-2 relative border-l border-l-gray-lighter"
style={{ minWidth: '270px' }}
style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}
>
<Tabs
tabs={TABS}

View file

@ -65,7 +65,7 @@ function PlayerContent({
className="w-full"
style={
activeTab && !fullscreen
? { maxWidth: 'calc(100% - 270px)' }
? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
: undefined
}
>

View file

@ -182,6 +182,7 @@ function Player(props: IProps) {
setActiveTab={(tab: string) =>
activeTab === tab ? props.setActiveTab('') : props.setActiveTab(tab)
}
activeTab={activeTab}
speedDown={playerContext.player.speedDown}
speedUp={playerContext.player.speedUp}
jump={playerContext.player.jump}

View file

@ -7,13 +7,16 @@ import { Icon } from 'UI';
function LogsButton({
integrated,
onClick,
shorten,
}: {
integrated: string[];
onClick: () => void;
shorten?: boolean;
}) {
return (
<ControlButton
label="Traces"
label={shorten ? null : "Traces"}
customKey="traces"
customTags={
<Avatar.Group>
{integrated.map((name) => (

View file

@ -38,8 +38,8 @@ function WebPlayer(props: any) {
uxtestingStore,
uiPlayerStore,
integrationsStore,
userStore,
} = useStore();
const devTools = sessionStore.devTools
const session = sessionStore.current;
const { prefetched } = sessionStore;
const startedAt = sessionStore.current.startedAt || 0;
@ -57,14 +57,17 @@ function WebPlayer(props: any) {
const [fullView, setFullView] = useState(false);
React.useEffect(() => {
if (windowActive) {
const handleActivation = () => {
if (!document.hidden) {
setWindowActive(true);
document.removeEventListener('visibilitychange', handleActivation);
}
};
document.addEventListener('visibilitychange', handleActivation);
const handleActivation = () => {
if (!document.hidden) {
setWindowActive(true);
document.removeEventListener('visibilitychange', handleActivation);
}
};
document.addEventListener('visibilitychange', handleActivation);
return () => {
devTools.update('network', { activeTab: 'ALL' });
document.removeEventListener('visibilitychange', handleActivation);
}
}, []);

View file

@ -169,6 +169,6 @@ function TabChange({ from, to, activeUrl, onClick }) {
</div>
</div>
);
}
};
export default observer(EventGroupWrapper);

View file

@ -4,17 +4,17 @@ import cn from 'classnames';
import { observer } from 'mobx-react-lite';
import React from 'react';
import { VList, VListHandle } from 'virtua';
import { Button } from 'antd'
import { Button } from 'antd';
import { PlayerContext } from 'App/components/Session/playerContext';
import { useStore } from 'App/mstore';
import { Icon } from 'UI';
import { Search } from 'lucide-react'
import { Search } from 'lucide-react';
import EventGroupWrapper from './EventGroupWrapper';
import EventSearch from './EventSearch/EventSearch';
import styles from './eventsBlock.module.css';
import { useTranslation } from 'react-i18next';
import { CloseOutlined } from ".store/@ant-design-icons-virtual-42686020c5/package";
import { Tooltip } from ".store/antd-virtual-9dbfadb7f6/package";
import { CloseOutlined } from "@ant-design/icons";
import { Tooltip } from "antd";
import { getDefaultFramework, frameworkIcons } from "../UnitStepsModal";
interface IProps {
@ -25,7 +25,7 @@ const MODES = {
SELECT: 'select',
SEARCH: 'search',
EXPORT: 'export',
}
};
function EventsBlock(props: IProps) {
const defaultFramework = getDefaultFramework();
@ -95,7 +95,7 @@ function EventsBlock(props: IProps) {
? e.time >= zoomStartTs && e.time <= zoomEndTs
: false
: true,
);
);
}, [
filteredLength,
notesWithEvtsLength,
@ -126,6 +126,7 @@ function EventsBlock(props: IProps) {
},
[usedEvents, time, endTime],
);
const currentTimeEventIndex = findLastFitting(time);
const write = ({
@ -182,6 +183,7 @@ function EventsBlock(props: IProps) {
const isTabChange = 'type' in event && event.type === 'TABCHANGE';
const isCurrent = index === currentTimeEventIndex;
const isPrev = index < currentTimeEventIndex;
return (
<EventGroupWrapper
query={query}
@ -249,12 +251,14 @@ function EventsBlock(props: IProps) {
onClick={() => setMode(MODES.SEARCH)}
>
<Search size={14} />
<div>{t('Search')}&nbsp;{usedEvents.length}&nbsp;{t('events')}</div>
<div>
{t('Search')}&nbsp;{usedEvents.length}&nbsp;{t('events')}
</div>
</Button>
<Tooltip title={t('Close Panel')} placement='bottom' >
<Tooltip title={t('Close Panel')} placement="bottom">
<Button
className="ml-auto"
type='text'
type="text"
onClick={() => {
setActiveTab('');
}}
@ -263,19 +267,23 @@ function EventsBlock(props: IProps) {
</Tooltip>
</div>
) : null}
{mode === MODES.SEARCH ?
{mode === MODES.SEARCH ? (
<div className={'flex items-center gap-2'}>
<EventSearch
onChange={write}
setActiveTab={setActiveTab}
value={query}
eventsText={
usedEvents.length ? `${usedEvents.length} ${t('Events')}` : `0 ${t('Events')}`
usedEvents.length
? `${usedEvents.length} ${t('Events')}`
: `0 ${t('Events')}`
}
/>
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>{t('Cancel')}</Button>
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>
{t('Cancel')}
</Button>
</div>
: null}
) : null}
</div>
<div
className={cn('flex-1 pb-4', styles.eventsList)}

View file

@ -4,7 +4,7 @@ import { Popover, Button } from 'antd';
import stl from './controlButton.module.css';
interface IProps {
label: string;
label: React.ReactNode;
icon?: string;
disabled?: boolean;
onClick?: () => void;
@ -18,6 +18,7 @@ interface IProps {
noIcon?: boolean;
popover?: React.ReactNode;
customTags?: React.ReactNode;
customKey?: string;
}
function ControlButton({
@ -28,29 +29,28 @@ function ControlButton({
active = false,
popover = undefined,
customTags,
customKey,
}: IProps) {
return (
<Popover content={popover} open={popover ? undefined : false}>
<Button
size="small"
onClick={onClick}
id={`control-button-${label.toLowerCase()}`}
id={`control-button-${customKey ? customKey.toLowerCase() : label!.toString().toLowerCase()}`}
disabled={disabled}
>
{customTags}
{hasErrors && (
<div className={stl.labels}>
<div className={stl.errorSymbol} />
</div>
<div className="w-2 h-2 rounded-full bg-red" />
)}
<span
{label && <span
className={cn(
'font-semibold hover:text-main',
active ? 'color-main' : 'color-gray-darkest',
)}
>
{label}
</span>
</span>}
</Button>
</Popover>
);

View file
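
With label widened to React.ReactNode, deriving the DOM id from label.toString() becomes fragile, which is what the new customKey prop addresses. A hypothetical helper (not in the diff) showing the fallback logic:

import type { ReactNode } from 'react';

// Prefer the explicit key; fall back to the label only when it is a plain string.
export function controlButtonId(customKey?: string, label?: ReactNode): string {
  const key = customKey ?? (typeof label === 'string' ? label : 'control');
  return `control-button-${key.toLowerCase()}`;
}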

@ -32,6 +32,8 @@ import {
} from 'App/mstore/uiPlayerStore';
import { Icon } from 'UI';
import LogsButton from 'App/components/Session/Player/SharedComponents/BackendLogs/LogsButton';
import { CodeOutlined, DashboardOutlined, ClusterOutlined } from '@ant-design/icons';
import { ArrowDownUp, ListCollapse, Merge, Waypoints } from 'lucide-react'
import ControlButton from './ControlButton';
import Timeline from './Timeline';
@ -52,23 +54,23 @@ export const SKIP_INTERVALS = {
function getStorageName(type: any) {
switch (type) {
case STORAGE_TYPES.REDUX:
return 'Redux';
return { name: 'Redux', icon: <Icon name='integrations/redux' size={14} /> };
case STORAGE_TYPES.MOBX:
return 'Mobx';
return { name: 'Mobx', icon: <Icon name='integrations/mobx' size={14} /> };
case STORAGE_TYPES.VUEX:
return 'Vuex';
return { name: 'Vuex', icon: <Icon name='integrations/vuejs' size={14} /> };
case STORAGE_TYPES.NGRX:
return 'NgRx';
return { name: 'NgRx', icon: <Icon name='integrations/ngrx' size={14} /> };
case STORAGE_TYPES.ZUSTAND:
return 'Zustand';
return { name: 'Zustand', icon: <Icon name='integrations/zustand' size={14} /> };
case STORAGE_TYPES.NONE:
return 'State';
return { name: 'State', icon: <ClusterOutlined size={14} /> };
default:
return 'State';
return { name: 'State', icon: <ClusterOutlined size={14} /> };
}
}
function Controls({ setActiveTab }: any) {
function Controls({ setActiveTab, activeTab }: any) {
const { player, store } = React.useContext(PlayerContext);
const {
uxtestingStore,
@ -191,6 +193,7 @@ function Controls({ setActiveTab }: any) {
bottomBlock={bottomBlock}
disabled={disabled}
events={events}
activeTab={activeTab}
/>
)}
@ -212,6 +215,7 @@ interface IDevtoolsButtons {
bottomBlock: number;
disabled: boolean;
events: any[];
activeTab?: string;
}
const DevtoolsButtons = observer(
@ -221,6 +225,7 @@ const DevtoolsButtons = observer(
bottomBlock,
disabled,
events,
activeTab,
}: IDevtoolsButtons) => {
const { t } = useTranslation();
const { aiSummaryStore, integrationsStore } = useStore();
@ -262,6 +267,36 @@ const DevtoolsButtons = observer(
const possibleAudio = events.filter((e) => e.name.includes('media/audio'));
const integratedServices =
integrationsStore.integrations.backendLogIntegrations;
const showIcons = activeTab === 'EXPORT'
const labels = {
console: {
icon: <CodeOutlined size={14} />,
label: t('Console'),
},
performance: {
icon: <DashboardOutlined size={14} />,
label: t('Performance'),
},
network: {
icon: <ArrowDownUp size={14} strokeWidth={2} />,
label: t('Network'),
},
events: {
icon: <ListCollapse size={14} strokeWidth={2} />,
label: t('Events'),
},
state: {
icon: getStorageName(storageType).icon,
label: getStorageName(storageType).name,
},
graphql: {
icon: <Merge size={14} strokeWidth={2} />,
label: 'Graphql',
}
}
// @ts-ignore
const getLabel = (block: string) => labels[block][showIcons ? 'icon' : 'label']
return (
<>
{isSaas ? <SummaryButton onClick={showSummary} /> : null}
@ -274,6 +309,7 @@ const DevtoolsButtons = observer(
</div>
</div>
}
customKey="xray"
label="X-Ray"
onClick={() => toggleBottomTools(OVERVIEW)}
active={bottomBlock === OVERVIEW && !inspectorMode}
@ -286,10 +322,11 @@ const DevtoolsButtons = observer(
<div>{t('Launch Console')}</div>
</div>
}
customKey="console"
disabled={disableButtons}
onClick={() => toggleBottomTools(CONSOLE)}
active={bottomBlock === CONSOLE && !inspectorMode}
label={t('Console')}
label={getLabel('console')}
hasErrors={logRedCount > 0 || showExceptions}
/>
@ -300,10 +337,11 @@ const DevtoolsButtons = observer(
<div>{t('Launch Network')}</div>
</div>
}
customKey="network"
disabled={disableButtons}
onClick={() => toggleBottomTools(NETWORK)}
active={bottomBlock === NETWORK && !inspectorMode}
label={t('Network')}
label={getLabel('network')}
hasErrors={resourceRedCount > 0}
/>
@ -314,10 +352,11 @@ const DevtoolsButtons = observer(
<div>{t('Launch Performance')}</div>
</div>
}
customKey="performance"
disabled={disableButtons}
onClick={() => toggleBottomTools(PERFORMANCE)}
active={bottomBlock === PERFORMANCE && !inspectorMode}
label="Performance"
label={getLabel('performance')}
/>
{showGraphql && (
@ -325,7 +364,8 @@ const DevtoolsButtons = observer(
disabled={disableButtons}
onClick={() => toggleBottomTools(GRAPHQL)}
active={bottomBlock === GRAPHQL && !inspectorMode}
label="Graphql"
label={getLabel('graphql')}
customKey="graphql"
/>
)}
@ -337,10 +377,11 @@ const DevtoolsButtons = observer(
<div>{t('Launch State')}</div>
</div>
}
customKey="state"
disabled={disableButtons}
onClick={() => toggleBottomTools(STORAGE)}
active={bottomBlock === STORAGE && !inspectorMode}
label={getStorageName(storageType) as string}
label={getLabel('state')}
/>
)}
<ControlButton
@ -350,14 +391,16 @@ const DevtoolsButtons = observer(
<div>{t('Launch Events')}</div>
</div>
}
customKey="events"
disabled={disableButtons}
onClick={() => toggleBottomTools(STACKEVENTS)}
active={bottomBlock === STACKEVENTS && !inspectorMode}
label={t('Events')}
label={getLabel('events')}
hasErrors={stackRedCount > 0}
/>
{showProfiler && (
<ControlButton
customKey="profiler"
disabled={disableButtons}
onClick={() => toggleBottomTools(PROFILER)}
active={bottomBlock === PROFILER && !inspectorMode}
@ -368,6 +411,7 @@ const DevtoolsButtons = observer(
<LogsButton
integrated={integratedServices.map((service) => service.name)}
onClick={() => toggleBottomTools(BACKENDLOGS)}
shorten={showIcons}
/>
) : null}
{possibleAudio.length ? (

View file
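
A typed sketch of the icon-versus-label lookup introduced above, which would also remove the need for the @ts-ignore. The block names mirror the keys used in the diff; the types are illustrative.

import type { ReactNode } from 'react';

type DevtoolBlock = 'console' | 'performance' | 'network' | 'events' | 'state' | 'graphql';

interface BlockLabel {
  icon: ReactNode;
  label: ReactNode;
}

// When the export panel is open, buttons collapse to icons to save horizontal space.
function makeGetLabel(labels: Record<DevtoolBlock, BlockLabel>, showIcons: boolean) {
  return (block: DevtoolBlock): ReactNode => (showIcons ? labels[block].icon : labels[block].label);
}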

@ -6,9 +6,11 @@ import {
import { observer } from 'mobx-react-lite';
import stl from './timeline.module.css';
import { getTimelinePosition } from './getTimelinePosition';
import { useStore } from '@/mstore';
function EventsList() {
const { store } = useContext(PlayerContext);
const { uiPlayerStore } = useStore();
const { eventCount, endTime } = store.get();
const { tabStates } = store.get();
@ -17,7 +19,6 @@ function EventsList() {
() => Object.values(tabStates)[0]?.eventList.filter((e) => e.time) || [],
[eventCount],
);
React.useEffect(() => {
const hasDuplicates = events.some(
(e, i) =>

View file

@ -49,7 +49,6 @@
z-index: 2;
}
.event {
position: absolute;
width: 2px;

View file

@ -38,6 +38,7 @@ function SubHeader(props) {
projectsStore,
userStore,
issueReportingStore,
settingsStore
} = useStore();
const { t } = useTranslation();
const { favorite } = sessionStore.current;
@ -45,7 +46,7 @@ function SubHeader(props) {
const currentSession = sessionStore.current;
const projectId = projectsStore.siteId;
const integrations = integrationsStore.issues.list;
const { store } = React.useContext(PlayerContext);
const { player, store } = React.useContext(PlayerContext);
const { location: currentLocation = 'loading...' } = store.get();
const hasIframe = localStorage.getItem(IFRAME) === 'true';
const [hideTools, setHideTools] = React.useState(false);
@ -127,6 +128,13 @@ function SubHeader(props) {
});
};
const showVModeBadge = store.get().vModeBadge;
const onVMode = () => {
settingsStore.sessionSettings.updateKey('virtualMode', true);
player.enableVMode?.();
location.reload();
}
return (
<>
<div
@ -143,6 +151,8 @@ function SubHeader(props) {
siteId={projectId!}
currentLocation={currentLocation}
version={currentSession?.trackerVersion ?? ''}
virtualElsFailed={showVModeBadge}
onVMode={onVMode}
/>
<SessionTabs />

View file

@ -202,7 +202,7 @@ function UnitStepsModal({ onClose }: Props) {
<div className={'w-full'}>
<CodeBlock
width={340}
height={'calc(100vh - 146px)'}
height={'calc(100vh - 174px)'}
extra={`${events.length} Events`}
copy
code={eventStr}

View file

@ -34,38 +34,46 @@ const WarnBadge = React.memo(
currentLocation,
version,
siteId,
virtualElsFailed,
onVMode,
}: {
currentLocation: string;
version: string;
siteId: string;
virtualElsFailed: boolean;
onVMode: () => void;
}) => {
const { t } = useTranslation();
const localhostWarnSiteKey = localhostWarn(siteId);
const defaultLocalhostWarn =
localStorage.getItem(localhostWarnSiteKey) !== '1';
const localhostWarnActive =
const localhostWarnActive = Boolean(
currentLocation &&
defaultLocalhostWarn &&
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation);
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation)
)
const trackerVersion = window.env.TRACKER_VERSION ?? undefined;
const trackerVerDiff = compareVersions(version, trackerVersion);
const trackerWarnActive = trackerVerDiff !== VersionComparison.Same;
const [showLocalhostWarn, setLocalhostWarn] =
React.useState(localhostWarnActive);
const [showTrackerWarn, setTrackerWarn] = React.useState(trackerWarnActive);
const [warnings, setWarnings] = React.useState<[localhostWarn: boolean, trackerWarn: boolean, virtualElsFailWarn: boolean]>([localhostWarnActive, trackerWarnActive, virtualElsFailed])
const closeWarning = (type: 1 | 2) => {
React.useEffect(() => {
setWarnings([localhostWarnActive, trackerWarnActive, virtualElsFailed])
}, [localhostWarnActive, trackerWarnActive, virtualElsFailed])
const closeWarning = (type: 0 | 1 | 2) => {
if (type === 1) {
localStorage.setItem(localhostWarnSiteKey, '1');
setLocalhostWarn(false);
}
if (type === 2) {
setTrackerWarn(false);
}
setWarnings((prev) => {
const newWarnings = [...prev];
newWarnings[type] = false;
return newWarnings;
});
};
if (!showLocalhostWarn && !showTrackerWarn) return null;
if (!warnings.some(el => el === true)) return null;
return (
<div
@ -79,7 +87,7 @@ const WarnBadge = React.memo(
fontWeight: 500,
}}
>
{showLocalhostWarn ? (
{warnings[0] ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div>
<span>{t('Some assets may load incorrectly on localhost.')}</span>
@ -101,7 +109,7 @@ const WarnBadge = React.memo(
</div>
</div>
) : null}
{showTrackerWarn ? (
{warnings[1] ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div>
<div>
@ -125,6 +133,21 @@ const WarnBadge = React.memo(
</div>
</div>
<div
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(1)}
>
<Icon name="close" size={16} color="black" />
</div>
</div>
) : null}
{warnings[2] ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div className="flex flex-col">
<div>{t('If you have issues displaying custom HTML elements (i.e when using LWC), consider turning on Virtual Mode.')}</div>
<div className='link' onClick={onVMode}>{t('Enable')}</div>
</div>
<div
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(2)}

View file
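
The badge replaces two independent booleans with an indexed tuple of warnings. A self-contained sketch of that pattern, assuming three warnings rendered in a fixed order (localhost, tracker version, virtual-elements failure):

import { useEffect, useState } from 'react';

type Warnings = [localhost: boolean, trackerVersion: boolean, virtualElsFailed: boolean];

export function useWarnings(initial: Warnings) {
  const [warnings, setWarnings] = useState<Warnings>(initial);

  // Re-sync when inputs change, e.g. the virtual-elements flag arriving after mount.
  useEffect(() => setWarnings(initial), [initial[0], initial[1], initial[2]]);

  const close = (index: 0 | 1 | 2) =>
    setWarnings((prev) => {
      const next = [...prev] as Warnings;
      next[index] = false;
      return next;
    });

  return { warnings, close, anyOpen: warnings.some(Boolean) };
}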

@ -12,60 +12,123 @@ import {
getDateRangeFromValue,
getDateRangeLabel,
} from 'App/dateRange';
import { DateTime, Interval } from 'luxon';
import { DateTime, Interval, Settings } from 'luxon';
import styles from './dateRangePopup.module.css';
import { useTranslation } from 'react-i18next';
function DateRangePopup(props: any) {
const { t } = useTranslation();
const [displayDates, setDisplayDates] = React.useState<[Date, Date]>([new Date(), new Date()]);
const [range, setRange] = React.useState(
props.selectedDateRange ||
Interval.fromDateTimes(DateTime.now(), DateTime.now()),
);
const [value, setValue] = React.useState<string | null>(null);
const selectCustomRange = (range) => {
let newRange;
if (props.singleDay) {
newRange = Interval.fromDateTimes(
DateTime.fromJSDate(range),
DateTime.fromJSDate(range),
);
} else {
newRange = Interval.fromDateTimes(
DateTime.fromJSDate(range[0]),
DateTime.fromJSDate(range[1]),
);
}
setRange(newRange);
React.useEffect(() => {
if (props.selectedDateRange) {
const start = new Date(
props.selectedDateRange.start.year,
props.selectedDateRange.start.month - 1, // JS months are 0-based
props.selectedDateRange.start.day
);
const end = new Date(
props.selectedDateRange.end.year,
props.selectedDateRange.end.month - 1,
props.selectedDateRange.end.day
);
setDisplayDates([start, end]);
}
}, [props.selectedDateRange]);
const createNaiveTime = (dateTime: DateTime) => {
if (!dateTime) return null;
return DateTime.fromObject({
hour: dateTime.hour,
minute: dateTime.minute
});
};
const selectCustomRange = (newDates: [Date, Date]) => {
if (!newDates || !newDates[0] || !newDates[1]) return;
setDisplayDates(newDates);
const selectedTzStart = DateTime.fromObject({
year: newDates[0].getFullYear(),
month: newDates[0].getMonth() + 1,
day: newDates[0].getDate(),
hour: 0,
minute: 0
}).setZone(Settings.defaultZone);
const selectedTzEnd = DateTime.fromObject({
year: newDates[1].getFullYear(),
month: newDates[1].getMonth() + 1,
day: newDates[1].getDate(),
hour: 23,
minute: 59
}).setZone(Settings.defaultZone);
const updatedRange = Interval.fromDateTimes(selectedTzStart, selectedTzEnd);
setRange(updatedRange);
setValue(CUSTOM_RANGE);
};
const setRangeTimeStart = (value: DateTime) => {
if (!range.end || value > range.end) {
return;
}
const newRange = range.start.set({
hour: value.hour,
minute: value.minute,
const setRangeTimeStart = (naiveTime: DateTime) => {
if (!range.end || !naiveTime) return;
const newStart = range.start.set({
hour: naiveTime.hour,
minute: naiveTime.minute
});
setRange(Interval.fromDateTimes(newRange, range.end));
if (newStart > range.end) return;
setRange(Interval.fromDateTimes(newStart, range.end));
setValue(CUSTOM_RANGE);
};
const setRangeTimeEnd = (value: DateTime) => {
if (!range.start || (value && value < range.start)) {
return;
}
const newRange = range.end.set({ hour: value.hour, minute: value.minute });
setRange(Interval.fromDateTimes(range.start, newRange));
const setRangeTimeEnd = (naiveTime: DateTime) => {
if (!range.start || !naiveTime) return;
const newEnd = range.end.set({
hour: naiveTime.hour,
minute: naiveTime.minute
});
if (newEnd < range.start) return;
setRange(Interval.fromDateTimes(range.start, newEnd));
setValue(CUSTOM_RANGE);
};
const selectValue = (value: string) => {
const range = getDateRangeFromValue(value);
setRange(range);
const newRange = getDateRangeFromValue(value);
if (!newRange.start || !newRange.end) {
setRange(Interval.fromDateTimes(DateTime.now(), DateTime.now()));
setDisplayDates([new Date(), new Date()]);
setValue(null);
return;
}
const zonedStart = newRange.start.setZone(Settings.defaultZone);
const zonedEnd = newRange.end.setZone(Settings.defaultZone);
setRange(Interval.fromDateTimes(zonedStart, zonedEnd));
const start = new Date(
zonedStart.year,
zonedStart.month - 1,
zonedStart.day
);
const end = new Date(
zonedEnd.year,
zonedEnd.month - 1,
zonedEnd.day
);
setDisplayDates([start, end]);
setValue(value);
};
@ -77,9 +140,9 @@ function DateRangePopup(props: any) {
const isUSLocale =
navigator.language === 'en-US' || navigator.language.startsWith('en-US');
const rangeForDisplay = props.singleDay
? range.start.ts
: [range.start!.startOf('day').ts, range.end!.startOf('day').ts];
const naiveStartTime = createNaiveTime(range.start);
const naiveEndTime = createNaiveTime(range.end);
return (
<div className={styles.wrapper}>
<div className={`${styles.body} h-fit`}>
@ -103,7 +166,7 @@ function DateRangePopup(props: any) {
shouldCloseCalendar={() => false}
isOpen
maxDate={new Date()}
value={rangeForDisplay}
value={displayDates}
calendarProps={{
tileDisabled: props.isTileDisabled,
selectRange: !props.singleDay,
@ -122,7 +185,7 @@ function DateRangePopup(props: any) {
<span>{range.start.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
<TimePicker
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
value={range.start}
value={naiveStartTime}
onChange={setRangeTimeStart}
needConfirm={false}
showNow={false}
@ -132,7 +195,7 @@ function DateRangePopup(props: any) {
<span>{range.end.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
<TimePicker
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
value={range.end}
value={naiveEndTime}
onChange={setRangeTimeEnd}
needConfirm={false}
showNow={false}

View file
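
A minimal sketch of the calendar-to-zoned conversion the popup now performs, assuming luxon 2 or later. The calendar widget yields plain JS Dates in the browser's local zone; the selected day boundaries are then expressed in the app's configured zone. Note that DateTime.fromObject without an explicit zone already uses Settings.defaultZone, so passing the zone option here mainly documents the intent.

import { DateTime, Interval, Settings } from 'luxon';

function intervalFromCalendarDates(start: Date, end: Date): Interval {
  const zonedStart = DateTime.fromObject(
    { year: start.getFullYear(), month: start.getMonth() + 1, day: start.getDate(), hour: 0, minute: 0 },
    { zone: Settings.defaultZone },
  );
  const zonedEnd = DateTime.fromObject(
    { year: end.getFullYear(), month: end.getMonth() + 1, day: end.getDate(), hour: 23, minute: 59 },
    { zone: Settings.defaultZone },
  );
  return Interval.fromDateTimes(zonedStart, zonedEnd);
}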

@ -1,9 +1,17 @@
/* eslint-disable i18next/no-literal-string */
import { ResourceType, Timed } from 'Player';
import { WsChannel } from 'Player/web/messages';
import MobilePlayer from 'Player/mobile/IOSPlayer';
import WebPlayer from 'Player/web/WebPlayer';
import { observer } from 'mobx-react-lite';
import React, { useMemo, useState } from 'react';
import React, {
useMemo,
useState,
useEffect,
useCallback,
useRef,
} from 'react';
import i18n from 'App/i18n'
import { useModal } from 'App/components/Modal';
import {
@ -12,25 +20,27 @@ import {
} from 'App/components/Session/playerContext';
import { formatMs } from 'App/date';
import { useStore } from 'App/mstore';
import { formatBytes } from 'App/utils';
import { formatBytes, debounceCall } from 'App/utils';
import { Icon, NoContent, Tabs } from 'UI';
import { Tooltip, Input, Switch, Form } from 'antd';
import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons';
import {
SearchOutlined,
InfoCircleOutlined,
} from '@ant-design/icons';
import FetchDetailsModal from 'Shared/FetchDetailsModal';
import { WsChannel } from 'App/player/web/messages';
import BottomBlock from '../BottomBlock';
import InfoLine from '../BottomBlock/InfoLine';
import TabSelector from '../TabSelector';
import TimeTable from '../TimeTable';
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
import WSPanel from './WSPanel';
import { useTranslation } from 'react-i18next';
import { mergeListsWithZoom, processInChunks } from './utils'
// Constants remain the same
const INDEX_KEY = 'network';
const ALL = 'ALL';
const XHR = 'xhr';
const JS = 'js';
@ -62,6 +72,9 @@ export const NETWORK_TABS = TAP_KEYS.map((tab) => ({
const DOM_LOADED_TIME_COLOR = 'teal';
const LOAD_TIME_COLOR = 'red';
const BATCH_SIZE = 2500;
const INITIAL_LOAD_SIZE = 5000;
export function renderType(r: any) {
return (
<Tooltip style={{ width: '100%' }} title={<div>{r.type}</div>}>
@ -79,13 +92,17 @@ export function renderName(r: any) {
}
function renderSize(r: any) {
const { t } = useTranslation();
if (r.responseBodySize) return formatBytes(r.responseBodySize);
const t = i18n.t;
const notCaptured = t('Not captured');
const resSizeStr = t('Resource size')
let triggerText;
let content;
if (r.decodedBodySize == null || r.decodedBodySize === 0) {
if (r.responseBodySize) {
triggerText = formatBytes(r.responseBodySize);
content = undefined;
} else if (r.decodedBodySize == null || r.decodedBodySize === 0) {
triggerText = 'x';
content = t('Not captured');
content = notCaptured;
} else {
const headerSize = r.headerSize || 0;
const showTransferred = r.headerSize != null;
@ -100,7 +117,7 @@ function renderSize(r: any) {
)} transferred over network`}
</li>
)}
<li>{`${t('Resource size')}: ${formatBytes(r.decodedBodySize)} `}</li>
<li>{`${resSizeStr}: ${formatBytes(r.decodedBodySize)} `}</li>
</ul>
);
}
@ -168,6 +185,8 @@ function renderStatus({
);
}
// Main component for Network Panel
function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
const { player, store } = React.useContext(PlayerContext);
const { sessionStore, uiPlayerStore } = useStore();
@ -216,6 +235,7 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
const getTabNum = (tab: string) => tabsArr.findIndex((t) => t === tab) + 1;
const getTabName = (tabId: string) => tabNames[tabId];
return (
<NetworkPanelComp
loadTime={loadTime}
@ -228,8 +248,8 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
resourceListNow={resourceListNow}
player={player}
startedAt={startedAt}
websocketList={websocketList as WSMessage[]}
websocketListNow={websocketListNow as WSMessage[]}
websocketList={websocketList}
websocketListNow={websocketListNow}
getTabNum={getTabNum}
getTabName={getTabName}
showSingleTab={showSingleTab}
@ -269,9 +289,7 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
resourceListNow={resourceListNow}
player={player}
startedAt={startedAt}
// @ts-ignore
websocketList={websocketList}
// @ts-ignore
websocketListNow={websocketListNow}
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
@ -280,12 +298,35 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
);
}
type WSMessage = Timed & {
channelName: string;
data: string;
timestamp: number;
dir: 'up' | 'down';
messageType: string;
const useInfiniteScroll = (loadMoreCallback: () => void, hasMore: boolean) => {
const observerRef = useRef<IntersectionObserver | null>(null);
const loadingRef = useRef<HTMLDivElement>(null);
useEffect(() => {
const observer = new IntersectionObserver(
(entries) => {
if (entries[0]?.isIntersecting && hasMore) {
loadMoreCallback();
}
},
{ threshold: 0.1 },
);
if (loadingRef.current) {
observer.observe(loadingRef.current);
}
observerRef.current = observer;
return () => {
if (observerRef.current) {
observerRef.current.disconnect();
}
};
}, [loadMoreCallback, hasMore, loadingRef]);
return loadingRef;
};
interface Props {
@ -302,8 +343,8 @@ interface Props {
resourceList: Timed[];
fetchListNow: Timed[];
resourceListNow: Timed[];
websocketList: Array<WSMessage>;
websocketListNow: Array<WSMessage>;
websocketList: Array<WsChannel>;
websocketListNow: Array<WsChannel>;
player: WebPlayer | MobilePlayer;
startedAt: number;
isMobile?: boolean;
@ -349,107 +390,189 @@ export const NetworkPanelComp = observer(
>(null);
const { showModal } = useModal();
const [showOnlyErrors, setShowOnlyErrors] = useState(false);
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false);
const [isLoading, setIsLoading] = useState(true);
const [isProcessing, setIsProcessing] = useState(false);
const [displayedItems, setDisplayedItems] = useState([]);
const [totalItems, setTotalItems] = useState(0);
const [summaryStats, setSummaryStats] = useState({
resourcesSize: 0,
transferredSize: 0,
});
const originalListRef = useRef([]);
const socketListRef = useRef([]);
const {
sessionStore: { devTools },
} = useStore();
const { filter } = devTools[INDEX_KEY];
const { activeTab } = devTools[INDEX_KEY];
const activeIndex = activeOutsideIndex ?? devTools[INDEX_KEY].index;
const [inputFilterValue, setInputFilterValue] = useState(filter);
const socketList = useMemo(
() =>
websocketList.filter(
(ws, i, arr) =>
arr.findIndex((it) => it.channelName === ws.channelName) === i,
),
[websocketList],
const debouncedFilter = useCallback(
debounceCall((filterValue) => {
devTools.update(INDEX_KEY, { filter: filterValue });
}, 300),
[],
);
const list = useMemo(
() =>
// TODO: better merge (with body size info) - do it in player
resourceList
.filter(
(res) =>
!fetchList.some((ft) => {
// res.url !== ft.url doesn't work on relative URLs appearing within fetchList (to-fix in player)
if (res.name === ft.name) {
if (res.time === ft.time) return true;
if (res.url.includes(ft.url)) {
return (
Math.abs(res.time - ft.time) < 350 ||
Math.abs(res.timestamp - ft.timestamp) < 350
);
}
}
if (res.name !== ft.name) {
return false;
}
if (Math.abs(res.time - ft.time) > 250) {
return false;
} // TODO: find good epsilons
if (Math.abs(res.duration - ft.duration) > 200) {
return false;
}
return true;
}),
)
.concat(fetchList)
.concat(
socketList.map((ws) => ({
...ws,
type: 'websocket',
method: 'ws',
url: ws.channelName,
name: ws.channelName,
status: '101',
duration: 0,
transferredBodySize: 0,
})),
)
.filter((req) =>
zoomEnabled
? req.time >= zoomStartTs! && req.time <= zoomEndTs!
: true,
)
.sort((a, b) => a.time - b.time),
[resourceList.length, fetchList.length, socketList.length],
);
let filteredList = useMemo(() => {
if (!showOnlyErrors) {
return list;
}
return list.filter(
(it) => parseInt(it.status) >= 400 || !it.success || it.error,
// Process socket lists once
useEffect(() => {
const uniqueSocketList = websocketList.filter(
(ws, i, arr) =>
arr.findIndex((it) => it.channelName === ws.channelName) === i,
);
}, [showOnlyErrors, list]);
filteredList = useRegExListFilterMemo(
filteredList,
(it) => [it.status, it.name, it.type, it.method],
filter,
);
filteredList = useTabListFilterMemo(
filteredList,
(it) => TYPE_TO_TAB[it.type],
ALL,
activeTab,
);
socketListRef.current = uniqueSocketList;
}, [websocketList.length]);
const onTabClick = (activeTab: (typeof TAP_KEYS)[number]) =>
// Initial data processing - do this only once when data changes
useEffect(() => {
setIsLoading(true);
// Heaviest operation here: builds the final merged network list
const processData = async () => {
const fetchUrls = new Set(
fetchList.map((ft) => {
return `${ft.name}-${Math.floor(ft.time / 100)}-${Math.floor(ft.duration / 100)}`;
}),
);
// We want to get resources that aren't in fetch list
const filteredResources = await processInChunks(resourceList, (chunk) =>
chunk.filter((res: any) => {
const key = `${res.name}-${Math.floor(res.time / 100)}-${Math.floor(res.duration / 100)}`;
return !fetchUrls.has(key);
}),
BATCH_SIZE,
25,
);
const processedSockets = socketListRef.current.map((ws: any) => ({
...ws,
type: 'websocket',
method: 'ws',
url: ws.channelName,
name: ws.channelName,
status: '101',
duration: 0,
transferredBodySize: 0,
}));
const mergedList: Timed[] = mergeListsWithZoom(
filteredResources as Timed[],
fetchList,
processedSockets as Timed[],
{ enabled: Boolean(zoomEnabled), start: zoomStartTs ?? 0, end: zoomEndTs ?? 0 }
)
originalListRef.current = mergedList;
setTotalItems(mergedList.length);
calculateResourceStats(resourceList);
// Only display initial chunk
setDisplayedItems(mergedList.slice(0, INITIAL_LOAD_SIZE));
setIsLoading(false);
};
void processData();
}, [
resourceList.length,
fetchList.length,
socketListRef.current.length,
zoomEnabled,
zoomStartTs,
zoomEndTs,
]);
const calculateResourceStats = (resourceList: Record<string, any>) => {
setTimeout(() => {
let resourcesSize = 0
let transferredSize = 0
resourceList.forEach(({ decodedBodySize, headerSize, encodedBodySize }: any) => {
resourcesSize += decodedBodySize || 0
transferredSize += (headerSize || 0) + (encodedBodySize || 0)
})
setSummaryStats({
resourcesSize,
transferredSize,
});
}, 0);
}
useEffect(() => {
if (originalListRef.current.length === 0) return;
setIsProcessing(true);
const applyFilters = async () => {
let filteredItems: any[] = originalListRef.current;
filteredItems = await processInChunks(filteredItems, (chunk) =>
chunk.filter(
(it) => {
let valid = true;
if (showOnlyErrors) {
valid = parseInt(it.status) >= 400 || !it.success || it.error
}
if (filter) {
try {
const regex = new RegExp(filter, 'i');
// Group the match alternatives so the errors-only check above is not bypassed by any single match.
valid = valid && (regex.test(it.status) || regex.test(it.name) || regex.test(it.type) || regex.test(it.method));
} catch (e) {
valid = valid && (String(it.status).includes(filter) || it.name.includes(filter) || it.type.includes(filter) || (it.method && it.method.includes(filter)));
}
}
if (activeTab !== ALL) {
valid = valid && TYPE_TO_TAB[it.type] === activeTab;
}
return valid;
},
),
);
// Update displayed items
setDisplayedItems(filteredItems.slice(0, INITIAL_LOAD_SIZE));
setTotalItems(filteredItems.length);
setIsProcessing(false);
};
void applyFilters();
}, [filter, activeTab, showOnlyErrors]);
const loadMoreItems = useCallback(() => {
if (isProcessing) return;
setIsProcessing(true);
setTimeout(() => {
setDisplayedItems((prevItems) => {
const currentLength = prevItems.length;
const newItems = originalListRef.current.slice(
currentLength,
currentLength + BATCH_SIZE,
);
return [...prevItems, ...newItems];
});
setIsProcessing(false);
}, 10);
}, [isProcessing]);
const hasMoreItems = displayedItems.length < totalItems;
const loadingRef = useInfiniteScroll(loadMoreItems, hasMoreItems);
const onTabClick = (activeTab) => {
devTools.update(INDEX_KEY, { activeTab });
const onFilterChange = ({
target: { value },
}: React.ChangeEvent<HTMLInputElement>) =>
devTools.update(INDEX_KEY, { filter: value });
};
const onFilterChange = ({ target: { value } }) => {
setInputFilterValue(value)
debouncedFilter(value);
};
// AutoScroll
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
filteredList,
displayedItems,
getLastItemTime(fetchListNow, resourceListNow),
activeIndex,
(index) => devTools.update(INDEX_KEY, { index }),
@ -462,24 +585,6 @@ export const NetworkPanelComp = observer(
timeoutStartAutoscroll();
};
const resourcesSize = useMemo(
() =>
resourceList.reduce(
(sum, { decodedBodySize }) => sum + (decodedBodySize || 0),
0,
),
[resourceList.length],
);
const transferredSize = useMemo(
() =>
resourceList.reduce(
(sum, { headerSize, encodedBodySize }) =>
sum + (headerSize || 0) + (encodedBodySize || 0),
0,
),
[resourceList.length],
);
const referenceLines = useMemo(() => {
const arr = [];
@ -513,7 +618,7 @@ export const NetworkPanelComp = observer(
isSpot={isSpot}
time={item.time + startedAt}
resource={item}
rows={filteredList}
rows={displayedItems}
fetchPresented={fetchList.length > 0}
/>,
{
@ -525,12 +630,10 @@ export const NetworkPanelComp = observer(
},
},
);
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
stopAutoscroll();
};
const tableCols = React.useMemo(() => {
const cols: any[] = [
const tableCols = useMemo(() => {
const cols = [
{
label: t('Status'),
dataKey: 'status',
@ -585,7 +688,7 @@ export const NetworkPanelComp = observer(
});
}
return cols;
}, [showSingleTab]);
}, [showSingleTab, activeTab, t, getTabName, getTabNum, isSpot]);
return (
<BottomBlock
@ -617,7 +720,7 @@ export const NetworkPanelComp = observer(
name="filter"
onChange={onFilterChange}
width={280}
value={filter}
value={inputFilterValue}
size="small"
prefix={<SearchOutlined className="text-neutral-400" />}
/>
@ -625,7 +728,7 @@ export const NetworkPanelComp = observer(
</BottomBlock.Header>
<BottomBlock.Content>
<div className="flex items-center justify-between px-4 border-b bg-teal/5 h-8">
<div>
<div className="flex items-center">
<Form.Item name="show-errors-only" className="mb-0">
<label
style={{
@ -642,21 +745,29 @@ export const NetworkPanelComp = observer(
<span className="text-sm ms-2">4xx-5xx Only</span>
</label>
</Form.Item>
{isProcessing && (
<span className="text-xs text-gray-500 ml-4">
Processing data...
</span>
)}
</div>
<InfoLine>
<InfoLine.Point label={`${totalItems}`} value="requests" />
<InfoLine.Point
label={`${filteredList.length}`}
value=" requests"
label={`${displayedItems.length}/${totalItems}`}
value="displayed"
display={displayedItems.length < totalItems}
/>
<InfoLine.Point
label={formatBytes(transferredSize)}
label={formatBytes(summaryStats.transferredSize)}
value="transferred"
display={transferredSize > 0}
display={summaryStats.transferredSize > 0}
/>
<InfoLine.Point
label={formatBytes(resourcesSize)}
label={formatBytes(summaryStats.resourcesSize)}
value="resources"
display={resourcesSize > 0}
display={summaryStats.resourcesSize > 0}
/>
<InfoLine.Point
label={formatMs(domBuildingTime)}
@ -679,42 +790,67 @@ export const NetworkPanelComp = observer(
/>
</InfoLine>
</div>
<NoContent
title={
<div className="capitalize flex items-center gap-2">
<InfoCircleOutlined size={18} />
{t('No Data')}
{isLoading ? (
<div className="flex items-center justify-center h-full">
<div className="text-center">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-gray-900 mx-auto mb-2"></div>
<p>Processing initial network data...</p>
</div>
}
size="small"
show={filteredList.length === 0}
>
{/* @ts-ignore */}
<TimeTable
rows={filteredList}
tableHeight={panelHeight - 102}
referenceLines={referenceLines}
renderPopup
onRowClick={showDetailsModal}
sortBy="time"
sortAscending
onJump={(row: any) => {
devTools.update(INDEX_KEY, {
index: filteredList.indexOf(row),
});
player.jump(row.time);
}}
activeIndex={activeIndex}
</div>
) : (
<NoContent
title={
<div className="capitalize flex items-center gap-2">
<InfoCircleOutlined size={18} />
{t('No Data')}
</div>
}
size="small"
show={displayedItems.length === 0}
>
{tableCols}
</TimeTable>
{selectedWsChannel ? (
<WSPanel
socketMsgList={selectedWsChannel}
onClose={() => setSelectedWsChannel(null)}
/>
) : null}
</NoContent>
<div>
<TimeTable
rows={displayedItems}
tableHeight={panelHeight - 102 - (hasMoreItems ? 30 : 0)}
referenceLines={referenceLines}
renderPopup
onRowClick={showDetailsModal}
sortBy="time"
sortAscending
onJump={(row) => {
devTools.update(INDEX_KEY, {
index: displayedItems.indexOf(row),
});
player.jump(row.time);
}}
activeIndex={activeIndex}
>
{tableCols}
</TimeTable>
{hasMoreItems && (
<div
ref={loadingRef}
className="flex justify-center items-center text-xs text-gray-500"
>
<div className="flex items-center">
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-gray-600 mr-2"></div>
Loading more data ({totalItems - displayedItems.length}{' '}
remaining)
</div>
</div>
)}
</div>
{selectedWsChannel ? (
<WSPanel
socketMsgList={selectedWsChannel}
onClose={() => setSelectedWsChannel(null)}
/>
) : null}
</NoContent>
)}
</BottomBlock.Content>
</BottomBlock>
);
@ -722,7 +858,6 @@ export const NetworkPanelComp = observer(
);
const WebNetworkPanel = observer(NetworkPanelCont);
const MobileNetworkPanel = observer(MobileNetworkPanelCont);
export { WebNetworkPanel, MobileNetworkPanel };

View file
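
The filter input above stays responsive by keeping its value in local state and pushing it to the devtools store only after a pause. debounceCall comes from App/utils and is assumed to behave like the generic helper below; the usage lines are illustrative.

function debounceCall<A extends unknown[]>(fn: (...args: A) => void, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A): void => {
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => fn(...args), wait);
  };
}

// Usage mirroring the panel: the input updates immediately, the store 300 ms after typing stops.
const pushFilter = debounceCall((value: string) => {
  console.log('devTools filter ->', value);
}, 300);
pushFilter('goog');
pushFilter('google.com'); // only this call reaches the store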

@ -0,0 +1,178 @@
export function mergeListsWithZoom<
T extends Record<string, any>,
Y extends Record<string, any>,
Z extends Record<string, any>,
>(
arr1: T[],
arr2: Y[],
arr3: Z[],
zoom?: { enabled: boolean; start: number; end: number },
): Array<T | Y | Z> {
// Early return for empty arrays
if (arr1.length === 0 && arr2.length === 0 && arr3.length === 0) {
return [];
}
// Optimized for common case - no zoom
if (!zoom?.enabled) {
return mergeThreeSortedArrays(arr1, arr2, arr3);
}
// Binary search for start indexes (faster than linear search for large arrays)
const index1 = binarySearchStartIndex(arr1, zoom.start);
const index2 = binarySearchStartIndex(arr2, zoom.start);
const index3 = binarySearchStartIndex(arr3, zoom.start);
// Merge arrays within zoom range
return mergeThreeSortedArraysWithinRange(
arr1,
arr2,
arr3,
index1,
index2,
index3,
zoom.start,
zoom.end,
);
}
function binarySearchStartIndex<T extends Record<string, any>>(
arr: T[],
threshold: number,
): number {
if (arr.length === 0) return 0;
let low = 0;
let high = arr.length - 1;
// Handle edge cases first for better performance
if (arr[high].time < threshold) return arr.length;
if (arr[low].time >= threshold) return 0;
while (low <= high) {
const mid = Math.floor((low + high) / 2);
if (arr[mid].time < threshold) {
low = mid + 1;
} else {
high = mid - 1;
}
}
return low;
}
function mergeThreeSortedArrays<
T extends Record<string, any>,
Y extends Record<string, any>,
Z extends Record<string, any>,
>(arr1: T[], arr2: Y[], arr3: Z[]): Array<T | Y | Z> {
const totalLength = arr1.length + arr2.length + arr3.length;
// prealloc array size
const result = new Array(totalLength);
let i = 0,
j = 0,
k = 0,
index = 0;
while (i < arr1.length || j < arr2.length || k < arr3.length) {
const val1 = i < arr1.length ? arr1[i].time : Infinity;
const val2 = j < arr2.length ? arr2[j].time : Infinity;
const val3 = k < arr3.length ? arr3[k].time : Infinity;
if (val1 <= val2 && val1 <= val3) {
result[index++] = arr1[i++];
} else if (val2 <= val1 && val2 <= val3) {
result[index++] = arr2[j++];
} else {
result[index++] = arr3[k++];
}
}
return result;
}
// Same three-way merge as above, restricted to the [start, end] zoom range
function mergeThreeSortedArraysWithinRange<
T extends Record<string, any>,
Y extends Record<string, any>,
Z extends Record<string, any>,
>(
arr1: T[],
arr2: Y[],
arr3: Z[],
startIdx1: number,
startIdx2: number,
startIdx3: number,
start: number,
end: number,
): Array<T | Y | Z> {
// Result size is not known in advance, so the array cannot be preallocated here
const result = [];
let i = startIdx1;
let j = startIdx2;
let k = startIdx3;
while (i < arr1.length || j < arr2.length || k < arr3.length) {
const val1 = i < arr1.length ? arr1[i].time : Infinity;
const val2 = j < arr2.length ? arr2[j].time : Infinity;
const val3 = k < arr3.length ? arr3[k].time : Infinity;
// Early termination: if all remaining values exceed end time
if (Math.min(val1, val2, val3) > end) {
break;
}
if (val1 <= val2 && val1 <= val3) {
if (val1 <= end) {
result.push(arr1[i]);
}
i++;
} else if (val2 <= val1 && val2 <= val3) {
if (val2 <= end) {
result.push(arr2[j]);
}
j++;
} else {
if (val3 <= end) {
result.push(arr3[k]);
}
k++;
}
}
return result;
}
export function processInChunks(
items: any[],
processFn: (chunk: any[]) => any[],
chunkSize = 1000,
overscan = 0,
) {
return new Promise((resolve) => {
if (items.length === 0) {
resolve([]);
return;
}
let result: any[] = [];
let index = 0;
const processNextChunk = () => {
const chunk = items.slice(index, index + chunkSize + overscan);
result = result.concat(processFn(chunk));
index += chunkSize;
if (index < items.length) {
setTimeout(processNextChunk, 0);
} else {
resolve(result);
}
};
processNextChunk();
});
}

View file
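
A usage sketch for the two helpers above, with toy items. It assumes the inputs are already sorted by time (which the merge relies on) and that the helpers are imported from the module shown here.

// import { mergeListsWithZoom, processInChunks } from './utils';
type TimedItem = { time: number; name: string };

const resources: TimedItem[] = [{ time: 10, name: 'a.css' }, { time: 400, name: 'b.js' }];
const fetches: TimedItem[] = [{ time: 120, name: '/api/user' }];
const sockets: TimedItem[] = [{ time: 300, name: 'ws://feed' }];

// Full three-way merge, ordered by `time`.
const all = mergeListsWithZoom(resources, fetches, sockets);

// Only items whose time falls inside the zoom window [100, 350].
const zoomed = mergeListsWithZoom(resources, fetches, sockets, { enabled: true, start: 100, end: 350 });

// processFn receives a chunk (an array slice), so it must return an array as well.
void processInChunks(all, (chunk: TimedItem[]) => chunk.filter((it) => it.time > 50), 1000).then(
  (filtered) => console.log((filtered as TimedItem[]).length, zoomed.length),
);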

@ -5,6 +5,7 @@ import cn from 'classnames';
import { Loader } from 'UI';
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
import { useTranslation } from 'react-i18next';
import { VList } from 'virtua';
function TruncatedText({
text,
@ -124,7 +125,7 @@ export function AutocompleteModal({
if (index === blocksAmount - 1 && blocksAmount > 1) {
str += ' and ';
}
str += `"${block.trim()}"`;
str += block.trim();
if (index < blocksAmount - 2) {
str += ', ';
}
@ -170,25 +171,27 @@ export function AutocompleteModal({
<>
<div
className="flex flex-col gap-2 overflow-y-auto py-2 overflow-x-hidden text-ellipsis"
style={{ maxHeight: 200 }}
style={{ height: Math.min(sortedOptions.length * 32, 240) }}
>
{sortedOptions.map((item) => (
<div
key={item.value}
onClick={() => onSelectOption(item)}
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
>
<Checkbox checked={isSelected(item)} /> {item.label}
</div>
))}
<VList count={sortedOptions.length} itemSize={18}>
{sortedOptions.map((item) => (
<div
key={item.value}
onClick={() => onSelectOption(item)}
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
>
<Checkbox checked={isSelected(item)} /> {item.label}
</div>
))}
</VList>
</div>
{query.length ? (
<div className="border-y border-y-gray-light py-2">
<div
className="whitespace-normal rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
className="whitespace-nowrap truncate w-full rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
onClick={applyQuery}
>
{t('Apply')}&nbsp;{queryStr}
{t('Apply')}&nbsp;<span className='font-semibold'>{queryStr}</span>
</div>
</div>
) : null}

View file

@ -128,8 +128,10 @@ const FilterAutoComplete = observer(
};
const handleFocus = () => {
if (!initialFocus) {
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
}
setInitialFocus(true);
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
};
return (

View file

@ -9,8 +9,10 @@ function LiveSessionSearch() {
const appliedFilter = searchStoreLive.instance;
useEffect(() => {
void searchStoreLive.fetchSessions();
}, []);
if (projectsStore.activeSiteId) {
void searchStoreLive.fetchSessions(true);
}
}, [projectsStore.activeSiteId])
const onAddFilter = (filter: any) => {
filter.autoOpen = true;

View file

@ -53,9 +53,6 @@ function SessionFilters() {
onBeforeLoad: async () => {
await reloadTags();
},
onLoaded: () => {
debounceFetch = debounce(() => searchStore.fetchSessions(), 500);
}
});
const onAddFilter = (filter: any) => {

View file

@ -19,11 +19,13 @@ export default function MetaItem(props: Props) {
<TextEllipsis
text={label}
className="p-0"
maxWidth={'300px'}
popupProps={{ size: 'small', disabled: true }}
/>
<span className="bg-neutral-200 inline-block w-[1px] min-h-[17px]"></span>
<TextEllipsis
text={value}
maxWidth={'350px'}
className="p-0 text-neutral-500"
popupProps={{ size: 'small', disabled: true }}
/>

View file

@ -7,13 +7,15 @@ interface Props {
className?: string;
metaList: any[];
maxLength?: number;
onMetaClick?: (meta: { name: string, value: string }) => void;
horizontal?: boolean;
}
export default function SessionMetaList(props: Props) {
const { className = '', metaList, maxLength = 14 } = props;
const { className = '', metaList, maxLength = 14, horizontal = false } = props;
return (
<div className={cn('flex items-center flex-wrap gap-1', className)}>
<div className={cn('flex items-center gap-1', horizontal ? '' : 'flex-wrap', className)}>
{metaList.slice(0, maxLength).map(({ label, value }, index) => (
<React.Fragment key={index}>
<MetaItem label={label} value={`${value}`} />

View file

@ -5,6 +5,7 @@ import ListingVisibility from './components/ListingVisibility';
import DefaultPlaying from './components/DefaultPlaying';
import DefaultTimezone from './components/DefaultTimezone';
import CaptureRate from './components/CaptureRate';
import { useTranslation } from 'react-i18next';
function SessionSettings() {

View file

@ -0,0 +1,30 @@
import React from 'react';
import { useStore } from 'App/mstore';
import { observer } from 'mobx-react-lite';
import { Switch } from 'UI';
import { useTranslation } from 'react-i18next';
function VirtualModeSettings() {
const { settingsStore } = useStore();
const { sessionSettings } = settingsStore;
const { virtualMode } = sessionSettings;
const { t } = useTranslation();
const updateSettings = (checked: boolean) => {
settingsStore.sessionSettings.updateKey('virtualMode', !virtualMode);
};
return (
<div>
<h3 className="text-lg">{t('Virtual Mode')}</h3>
<div className="my-1">
{t('Change this setting if you have issues with recordings containing Lightning Web Components (or similar custom HTML Element libraries).')}
</div>
<div className="mt-2">
<Switch onChange={updateSettings} checked={virtualMode} />
</div>
</div>
);
}
export default observer(VirtualModeSettings);

View file

@ -5,6 +5,15 @@ import { Tooltip } from 'antd';
import cn from 'classnames';
import { useTranslation } from 'react-i18next';
interface Props {
code?: string;
extra?: string;
language?: string;
copy?: boolean;
width?: string;
height?: string;
}
export default function CodeBlock({
code = '',
extra = '',
@ -12,7 +21,7 @@ export default function CodeBlock({
copy = false,
width = undefined,
height = undefined,
}) {
}: Props) {
const { t } = useTranslation();
useEffect(() => {
setTimeout(() => {

View file

@ -9,6 +9,7 @@ export const GLOBAL_HAS_NO_RECORDINGS = '__$global-hasNoRecordings$__';
export const SITE_ID_STORAGE_KEY = '__$user-siteId$__';
export const GETTING_STARTED = '__$user-gettingStarted$__';
export const MOUSE_TRAIL = '__$session-mouseTrail$__';
export const VIRTUAL_MODE_KEY = '__$session-virtualMode$__'
export const IFRAME = '__$session-iframe$__';
export const JWT_PARAM = '__$session-jwt-param$__';
export const MENU_COLLAPSED = '__$global-menuCollapsed$__';

View file

@ -49,13 +49,8 @@ const useSessionSearchQueryHandler = ({
searchStore.applyFilter(filter, true);
}
// Important: Mark URL as parsed BEFORE fetching
// This prevents the initial fetch when the URL is parsed
searchStore.setUrlParsed();
// Then fetch sessions - this is the only place that should fetch initially
await searchStore.fetchSessions();
onLoaded();
onLoaded?.();
} catch (error) {
console.error('Error applying filter from query:', error);
searchStore.setUrlParsed();

View file

@ -255,7 +255,7 @@ function SideMenu(props: Props) {
<Tag
color="cyan"
bordered={false}
className="text-xs"
className="text-xs ml-2"
>
{t('Beta')}
</Tag>

View file

@ -503,7 +503,7 @@
"Returning users between": "Returning users between",
"Sessions": "Sessions",
"No recordings found.": "No recordings found.",
"Get new session": "Get new session",
"Get new image": "Get new image",
"The number of cards in one dashboard is limited to 30.": "The number of cards in one dashboard is limited to 30.",
"Add Card": "Add Card",
"Create Dashboard": "Create Dashboard",

View file

@ -503,7 +503,7 @@
"Returning users between": "Usuarios recurrentes entre",
"Sessions": "Sesiones",
"No recordings found.": "No se encontraron grabaciones.",
"Get new session": "Obtener nueva sesión",
"Get new image": "Obtener nueva sesión",
"The number of cards in one dashboard is limited to 30.": "El número de tarjetas en un panel está limitado a 30.",
"Add Card": "Agregar tarjeta",
"Create Dashboard": "Crear panel",

View file

@ -503,7 +503,7 @@
"Returning users between": "Utilisateurs récurrents entre",
"Sessions": "Sessions",
"No recordings found.": "Aucun enregistrement trouvé.",
"Get new session": "Obtenir une nouvelle session",
"Get new image": "Obtenir une nouvelle session",
"The number of cards in one dashboard is limited to 30.": "Le nombre de cartes dans un tableau de bord est limité à 30.",
"Add Card": "Ajouter une carte",
"Create Dashboard": "Créer un tableau de bord",

View file

@ -504,7 +504,7 @@
"Returning users between": "Возвращающиеся пользователи за период",
"Sessions": "Сессии",
"No recordings found.": "Записей не найдено.",
"Get new session": "Получить новую сессию",
"Get new image": "Получить новую сессию",
"The number of cards in one dashboard is limited to 30.": "Количество карточек в одном дашборде ограничено 30.",
"Add Card": "Добавить карточку",
"Create Dashboard": "Создать дашборд",
@ -1498,5 +1498,8 @@
"More attribute": "Еще атрибут",
"More attributes": "Еще атрибуты",
"Account settings updated successfully": "Настройки аккаунта успешно обновлены",
"Include rage clicks": "Включить невыносимые клики"
}
"Include rage clicks": "Включить невыносимые клики",
"Interface Language": "Язык интерфейса",
"Select the language in which OpenReplay will appear.": "Выберите язык, на котором будет отображаться OpenReplay.",
"Language": "Язык"
}

View file

@ -503,7 +503,7 @@
"Returning users between": "回访用户区间",
"Sessions": "会话",
"No recordings found.": "未找到录制。",
"Get new session": "获取新会话",
"Get new image": "获取新会话",
"The number of cards in one dashboard is limited to 30.": "一个仪表板最多可包含30个卡片。",
"Add Card": "添加卡片",
"Create Dashboard": "创建仪表板",

View file

@ -1,11 +1,13 @@
import { makeAutoObservable, runInAction } from 'mobx';
import { makeAutoObservable, runInAction, reaction } from 'mobx';
import { dashboardService, metricService } from 'App/services';
import { toast } from 'react-toastify';
import Period, { LAST_24_HOURS, LAST_7_DAYS } from 'Types/app/period';
import Period, { LAST_24_HOURS } from 'Types/app/period';
import { getRE } from 'App/utils';
import Filter from './types/filter';
import Widget from './types/widget';
import Dashboard from './types/dashboard';
import { calculateGranularities } from '@/components/Dashboard/components/WidgetDateRange/RangeGranularity';
import { CUSTOM_RANGE } from '@/dateRange';
interface DashboardFilter {
query?: string;
@ -34,9 +36,9 @@ export default class DashboardStore {
comparisonFilter: Filter = new Filter();
drillDownPeriod: Record<string, any> = Period({ rangeName: LAST_7_DAYS });
drillDownPeriod: Record<string, any> = Period({ rangeName: LAST_24_HOURS });
selectedDensity: number = 7; // depends on default drilldown, 7 points here!!!;
selectedDensity: number = 7;
comparisonPeriods: Record<string, any> = {};
@ -83,10 +85,29 @@ export default class DashboardStore {
makeAutoObservable(this);
this.resetDrillDownFilter();
this.createDensity(this.period.getDuration());
reaction(
() => this.period,
(period) => {
this.createDensity(period.getDuration());
},
);
}
setDensity = (density: any) => {
this.selectedDensity = parseInt(density, 10);
resetDensity = () => {
this.createDensity(this.period.getDuration());
};
createDensity = (duration: number) => {
const densityOpts = calculateGranularities(duration);
const defaultOption = densityOpts[densityOpts.length - 2];
this.setDensity(defaultOption.key);
};
setDensity = (density: number) => {
this.selectedDensity = density;
};
get sortedDashboards() {
@ -446,7 +467,7 @@ export default class DashboardStore {
this.isSaving = true;
try {
try {
const response = await dashboardService.addWidget(dashboard, metricIds);
await dashboardService.addWidget(dashboard, metricIds);
toast.success('Card added to dashboard.');
} catch {
toast.error('Card could not be added.');
@ -456,6 +477,17 @@ export default class DashboardStore {
}
}
resetPeriod = () => {
if (this.period) {
const range = this.period.rangeName;
if (range !== CUSTOM_RANGE) {
this.period = Period({ rangeName: this.period.rangeName });
} else {
this.period = Period({ rangeName: LAST_24_HOURS });
}
}
};
setPeriod(period: any) {
this.period = Period({
start: period.start,

View file
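
A self-contained sketch of the reaction wiring added to the store: whenever the observable period changes, the default density is re-derived from the available granularity options. calculateGranularitiesStub stands in for the real calculateGranularities helper and is only an assumption about its shape.

import { makeAutoObservable, reaction } from 'mobx';

// Assumed shape: an ordered list of density options for a given duration.
function calculateGranularitiesStub(durationMs: number): { key: number }[] {
  const hours = Math.max(1, Math.round(durationMs / 3_600_000));
  return [24, 12, 6, 2, 1].map((d) => ({ key: Math.max(1, Math.floor(hours / d)) }));
}

class PeriodDensityStore {
  periodDurationMs = 24 * 60 * 60 * 1000;
  selectedDensity = 7;

  constructor() {
    makeAutoObservable(this);
    // Mirrors the diff: recompute the default density whenever the period changes.
    reaction(
      () => this.periodDurationMs,
      (duration) => this.resetDensity(duration),
    );
  }

  setPeriod(durationMs: number) {
    this.periodDurationMs = durationMs;
  }

  resetDensity(durationMs: number) {
    const opts = calculateGranularitiesStub(durationMs);
    // The store above picks the second-to-last option as the default.
    this.selectedDensity = opts[opts.length - 2]?.key ?? this.selectedDensity;
  }
}

new PeriodDensityStore().setPeriod(7 * 24 * 60 * 60 * 1000);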

@ -1,6 +1,5 @@
import { makeAutoObservable } from 'mobx';
import { issueReportsService } from 'App/services';
import { makePersistable } from '.store/mobx-persist-store-virtual-858ce4d906/package';
import ReportedIssue from '../types/session/assignment';
export default class IssueReportingStore {

View file

@ -4,7 +4,6 @@ import {
SITE_ID_STORAGE_KEY,
} from 'App/constants/storageKeys';
import { projectsService } from 'App/services';
import { toast } from '.store/react-toastify-virtual-9dd0f3eae1/package';
import GDPR from './types/gdpr';
import Project from './types/project';

View file

@ -390,10 +390,11 @@ class SearchStore {
// TODO
}
async fetchSessions(
fetchSessions = async (
force: boolean = false,
bookmarked: boolean = false,
): Promise<void> {
): Promise<void> => {
console.log(this.searchInProgress)
if (this.searchInProgress) return;
const filter = this.instance.toSearch();

View file
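
Why the method becomes an arrow class field: fetchSessions is handed around as a callback (effects, debounced wrappers), and an arrow field keeps this bound to the store instance. A minimal illustration, not the actual store:

class SearchStoreSketch {
  searchInProgress = false;

  fetchSessions = async (force = false): Promise<void> => {
    if (this.searchInProgress && !force) return;
    this.searchInProgress = true;
    try {
      // ...run the search request here...
    } finally {
      this.searchInProgress = false;
    }
  };
}

const store = new SearchStoreSketch();
const callback = store.fetchSessions; // safe: `this` stays bound to `store`
void callback();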

@ -220,6 +220,7 @@ class SearchStoreLive {
updateFilter = (index: number, search: Partial<IFilter>) => {
const newFilters = this.instance.filters.map((_filter: any, i: any) => {
if (i === index) {
search.value = checkFilterValue(search.value);
return search;
}
return _filter;

View file

@ -15,9 +15,7 @@ import { loadFile } from 'App/player/web/network/loadFiles';
import { LAST_7_DAYS } from 'Types/app/period';
import { filterMap } from 'App/mstore/searchStore';
import { getDateRangeFromValue } from 'App/dateRange';
import { clean as cleanParams } from '../api_client';
import { searchStore, searchStoreLive } from './index';
const range = getDateRangeFromValue(LAST_7_DAYS);
const defaultDateFilters = {

View file

@ -157,7 +157,7 @@ export default class FilterItem {
const json = {
type: isMetadata ? FilterKey.METADATA : this.key,
isEvent: Boolean(this.isEvent),
value: this.value.map((i: any) => (i ? i.toString() : '')),
value: this.value?.map((i: any) => (i ? i.toString() : '')) || [],
operator: this.operator,
source: isMetadata ? this.key.replace(/^_/, '') : this.source,
sourceOperator: this.sourceOperator,

View file

@ -7,6 +7,7 @@ import Filter, { IFilter } from 'App/mstore/types/filter';
import FilterItem from 'App/mstore/types/filterItem';
import { makeAutoObservable, observable } from 'mobx';
import { LAST_24_HOURS, LAST_30_DAYS, LAST_7_DAYS } from 'Types/app/period';
import { roundToNextMinutes } from '@/utils';
// @ts-ignore
const rangeValue = DATE_RANGE_VALUES.LAST_24_HOURS;
@ -177,6 +178,7 @@ export default class Search {
js.rangeValue,
js.startDate,
js.endDate,
15,
);
js.startDate = startDate;
js.endDate = endDate;
@ -190,12 +192,11 @@ export default class Search {
rangeName: string,
customStartDate: number,
customEndDate: number,
): {
startDate: number;
endDate: number;
} {
roundMinutes?: number,
): { startDate: number; endDate: number } {
let endDate = new Date().getTime();
let startDate: number;
const minutes = roundMinutes || 15;
switch (rangeName) {
case LAST_7_DAYS:
@ -206,9 +207,7 @@ export default class Search {
break;
case CUSTOM_RANGE:
if (!customStartDate || !customEndDate) {
throw new Error(
'Start date and end date must be provided for CUSTOM_RANGE.',
);
throw new Error('Start date and end date must be provided for CUSTOM_RANGE.');
}
startDate = customStartDate;
endDate = customEndDate;
@ -218,10 +217,12 @@ export default class Search {
startDate = endDate - 24 * 60 * 60 * 1000;
}
return {
startDate,
endDate,
};
if (rangeName !== CUSTOM_RANGE) {
startDate = roundToNextMinutes(startDate, minutes);
endDate = roundToNextMinutes(endDate, minutes);
}
return { startDate, endDate };
}
fromJS({ eventsOrder, filters, events, custom, ...filterData }: any) {

View file
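
roundToNextMinutes is imported from @/utils and is not shown in this diff; the sketch below is an assumption about its behavior, rounding a unix-ms timestamp up to the next N-minute boundary so that repeated relative ranges (e.g. last 24 hours) produce stable, cache-friendly boundaries.

function roundToNextMinutes(ts: number, minutes: number): number {
  const step = minutes * 60 * 1000;
  return Math.ceil(ts / step) * step;
}

// Two calls made within the same 15-minute window yield identical ranges.
const end = roundToNextMinutes(Date.now(), 15);
const start = roundToNextMinutes(end - 24 * 60 * 60 * 1000, 15);
console.log(new Date(start).toISOString(), new Date(end).toISOString());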

@ -6,6 +6,7 @@ import {
SHOWN_TIMEZONE,
DURATION_FILTER,
MOUSE_TRAIL,
VIRTUAL_MODE_KEY,
} from 'App/constants/storageKeys';
import { DateTime, Settings } from 'luxon';
@ -71,27 +72,19 @@ export const generateGMTZones = (): Timezone[] => {
export default class SessionSettings {
defaultTimezones = [...generateGMTZones()];
skipToIssue: boolean = localStorage.getItem(SKIP_TO_ISSUE) === 'true';
timezone: Timezone;
durationFilter: any = JSON.parse(
localStorage.getItem(DURATION_FILTER) ||
JSON.stringify(defaultDurationFilter),
);
captureRate: string = '0';
conditionalCapture: boolean = false;
captureConditions: { name: string; captureRate: number; filters: any[] }[] =
[];
mouseTrail: boolean = localStorage.getItem(MOUSE_TRAIL) !== 'false';
shownTimezone: 'user' | 'local';
virtualMode: boolean = localStorage.getItem(VIRTUAL_MODE_KEY) === 'true';
usingLocal: boolean = false;
constructor() {

View file

@ -163,6 +163,7 @@ export default class Widget {
fromJson(json: any, period?: any) {
json.config = json.config || {};
runInAction(() => {
this.dashboardId = json.dashboardId;
this.metricId = json.metricId;
this.widgetId = json.widgetId;
this.metricValue = this.metricValueFromArray(

View file

@ -114,12 +114,14 @@ class UserStore {
get isEnterprise() {
return (
this.account?.edition === 'ee' ||
this.account?.edition === 'msaas' ||
this.authStore.authDetails?.edition === 'ee' ||
this.authStore.authDetails?.edition === 'msaas'
this.authStore.authDetails?.edition === 'ee'
);
}
get isSSOSupported() {
return this.isEnterprise || this.account?.edition === 'msaas' || this.authStore.authDetails?.edition === 'msaas';
}
get isLoggedIn() {
return Boolean(this.jwt);
}

View file

@ -124,13 +124,9 @@ export default class ListWalker<T extends Timed> {
* Assumed that the current message is already handled so
* if pointer doesn't change <null> is returned.
*/
moveGetLast(t: number, index?: number): T | null {
let key: string = 'time'; // TODO
let val = t;
if (index) {
key = '_index';
val = index;
}
moveGetLast(t: number, index?: number, force?: boolean, debug?: boolean): T | null {
const key: string = index ? '_index' : 'time';
const val = index ? index : t;
let changed = false;
// @ts-ignore
@ -143,7 +139,10 @@ export default class ListWalker<T extends Timed> {
this.movePrev();
changed = true;
}
return changed ? this.list[this.p - 1] : null;
if (debug) {
console.log(this.list[this.p - 1])
}
return changed || force ? this.list[this.p - 1] : null;
}
prevTs = 0;
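A hedged usage sketch of the widened moveGetLast signature (the walker instance and timestamp below are illustrative, not from this diff): force returns the element behind the pointer even when the pointer did not move, and debug only logs it.

// Illustrative only; assumes an existing ListWalker<Timed> instance named walker.
const moved = walker.moveGetLast(currentTime);                          // null when the pointer stays put
const forced = walker.moveGetLast(currentTime, undefined, true);        // element behind the pointer even if unchanged
const traced = walker.moveGetLast(currentTime, undefined, false, true); // logs the element it lands on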

View file

@ -43,27 +43,7 @@ export default class MessageLoader {
this.session = session;
}
/**
* TODO: has to be moved out of messageLoader logic somehow
* */
spriteMapSvg: SVGElement | null = null;
potentialSpriteMap: Record<string, any> = {};
domParser: DOMParser | null = null;
createSpriteMap = () => {
if (!this.spriteMapSvg) {
this.domParser = new DOMParser();
this.spriteMapSvg = document.createElementNS(
'http://www.w3.org/2000/svg',
'svg',
);
this.spriteMapSvg.setAttribute('style', 'display: none;');
this.spriteMapSvg.setAttribute('id', 'reconstructed-sprite');
}
};
rawMessages: any[] = []
createNewParser(
shouldDecrypt = true,
onMessagesDone: (msgs: PlayerMsg[], file?: string) => void,
@ -90,6 +70,7 @@ export default class MessageLoader {
while (!finished) {
const msg = fileReader.readNext();
if (msg) {
this.rawMessages.push(msg)
msgs.push(msg);
} else {
finished = true;
@ -99,23 +80,7 @@ export default class MessageLoader {
let artificialStartTime = Infinity;
let startTimeSet = false;
msgs.forEach((msg, i) => {
if (msg.tp === MType.SetNodeAttribute) {
if (msg.value.includes('_$OPENREPLAY_SPRITE$_')) {
this.createSpriteMap();
if (!this.domParser) {
return console.error('DOM parser is not initialized?');
}
handleSprites(
this.potentialSpriteMap,
this.domParser,
msg,
this.spriteMapSvg!,
i,
);
}
}
if (msg.tp === MType.Redux || msg.tp === MType.ReduxDeprecated) {
if ('actionTime' in msg && msg.actionTime) {
msg.time = msg.actionTime - this.session.startedAt;
@ -333,10 +298,6 @@ export default class MessageLoader {
await Promise.allSettled([restDomFilesPromise, restDevtoolsFilesPromise]);
this.messageManager.onFileReadSuccess();
// no sprites for mobile
if (this.spriteMapSvg && 'injectSpriteMap' in this.messageManager) {
this.messageManager.injectSpriteMap(this.spriteMapSvg);
}
};
loadEFSMobs = async () => {
@ -383,27 +344,32 @@ const DOMMessages = [
MType.CreateElementNode,
MType.CreateTextNode,
MType.MoveNode,
MType.RemoveNode,
MType.CreateIFrameDocument,
];
// fixed times: 3
function brokenDomSorter(m1: PlayerMsg, m2: PlayerMsg) {
if (m1.time !== m2.time) return m1.time - m2.time;
if (m1.tp === MType.CreateDocument && m2.tp !== MType.CreateDocument)
return -1;
if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
return 1;
// if (m1.tp === MType.CreateDocument && m2.tp !== MType.CreateDocument)
// return -1;
// if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
// return 1;
const m1IsDOM = DOMMessages.includes(m1.tp);
const m2IsDOM = DOMMessages.includes(m2.tp);
if (m1IsDOM && m2IsDOM) {
// @ts-ignore DOM msg has id but checking for 'id' in m is expensive
return m1.id - m2.id;
}
// if (m1.tp === MType.RemoveNode)
// return 1;
// if (m2.tp === MType.RemoveNode)
// return -1;
if (m1IsDOM && !m2IsDOM) return -1;
if (!m1IsDOM && m2IsDOM) return 1;
// const m1IsDOM = DOMMessages.includes(m1.tp);
// const m2IsDOM = DOMMessages.includes(m2.tp);
// if (m1IsDOM && m2IsDOM) {
// // @ts-ignore DOM msg has id but checking for 'id' in m is expensive
// return m1.id - m2.id;
// }
// if (m1IsDOM && !m2IsDOM) return -1;
// if (!m1IsDOM && m2IsDOM) return 1;
return 0;
}
@ -467,40 +433,6 @@ function findBrokenNodes(nodes: any[]) {
return result;
}
function handleSprites(
potentialSpriteMap: Record<string, any>,
parser: DOMParser,
msg: Record<string, any>,
spriteMapSvg: SVGElement,
i: number,
) {
const [_, svgData] = msg.value.split('_$OPENREPLAY_SPRITE$_');
const potentialSprite = potentialSpriteMap[svgData];
if (potentialSprite) {
msg.value = potentialSprite;
} else {
const svgDoc = parser.parseFromString(svgData, 'image/svg+xml');
const originalSvg = svgDoc.querySelector('svg');
if (originalSvg) {
const symbol = document.createElementNS(
'http://www.w3.org/2000/svg',
'symbol',
);
const symbolId = `symbol-${msg.id || `ind-${i}`}`; // Generate an ID if missing
symbol.setAttribute('id', symbolId);
symbol.setAttribute(
'viewBox',
originalSvg.getAttribute('viewBox') || '0 0 24 24',
);
symbol.innerHTML = originalSvg.innerHTML;
spriteMapSvg.appendChild(symbol);
msg.value = `#${symbolId}`;
potentialSpriteMap[svgData] = `#${symbolId}`;
}
}
}
// @ts-ignore
window.searchOrphans = (msgs) =>
findBrokenNodes(msgs.filter((m) => [8, 9, 10, 70].includes(m.tp)));

View file

@ -1,7 +1,7 @@
// @ts-ignore
import { Decoder } from 'syncod';
import logger from 'App/logger';
import { VIRTUAL_MODE_KEY } from '@/constants/storageKeys';
import type { Store, ILog, SessionFilesInfo } from 'Player';
import TabSessionManager, { TabState } from 'Player/web/TabManager';
import ActiveTabManager from 'Player/web/managers/ActiveTabManager';
@ -69,6 +69,7 @@ export interface State extends ScreenState {
tabChangeEvents: TabChangeEvent[];
closedTabs: string[];
sessionStart: number;
vModeBadge: boolean;
}
export const visualChanges = [
@ -99,6 +100,7 @@ export default class MessageManager {
closedTabs: [],
sessionStart: 0,
tabNames: {},
vModeBadge: false,
};
private clickManager: ListWalker<MouseClick> = new ListWalker();
@ -126,7 +128,6 @@ export default class MessageManager {
private tabsAmount = 0;
private tabChangeEvents: TabChangeEvent[] = [];
private activeTab = '';
constructor(
@ -142,8 +143,19 @@ export default class MessageManager {
this.activityManager = new ActivityManager(
this.session.duration.milliseconds,
); // only if not-live
const vMode = localStorage.getItem(VIRTUAL_MODE_KEY);
if (vMode === 'true') {
this.setVirtualMode(true);
}
}
private virtualMode = false;
public setVirtualMode = (virtualMode: boolean) => {
this.virtualMode = virtualMode;
Object.values(this.tabs).forEach((tab) => tab.setVirtualMode(virtualMode));
};
public getListsFullState = () => {
const fullState: Record<string, any> = {};
for (const tab in Object.keys(this.tabs)) {
@ -201,8 +213,16 @@ export default class MessageManager {
}
Object.values(this.tabs).forEach((tab) => tab.onFileReadSuccess?.());
this.updateSpriteMap();
};
public updateSpriteMap = () => {
if (this.spriteMapSvg) {
this.injectSpriteMap(this.spriteMapSvg);
}
}
public onFileReadFailed = (...e: any[]) => {
logger.error(e);
this.state.update({ error: true });
@ -288,15 +308,17 @@ export default class MessageManager {
}
if (tabId) {
const stateUpdate: { currentTab?: string, tabs?: Set<string> } = {}
if (this.activeTab !== tabId) {
this.state.update({ currentTab: tabId });
stateUpdate['currentTab'] = tabId;
this.activeTab = tabId;
this.tabs[this.activeTab].clean();
}
const activeTabs = this.state.get().tabs;
if (activeTabs.size !== this.activeTabManager.tabInstances.size) {
this.state.update({ tabs: this.activeTabManager.tabInstances });
stateUpdate['tabs'] = this.activeTabManager.tabInstances;
}
this.state.update(stateUpdate)
}
if (this.tabs[this.activeTab]) {
@ -335,9 +357,38 @@ export default class MessageManager {
this.state.update({ tabChangeEvents: this.tabChangeEvents });
}
spriteMapSvg: SVGElement | null = null;
potentialSpriteMap: Record<string, any> = {};
domParser: DOMParser | null = null;
createSpriteMap = () => {
if (!this.spriteMapSvg) {
this.domParser = new DOMParser();
this.spriteMapSvg = document.createElementNS(
'http://www.w3.org/2000/svg',
'svg',
);
this.spriteMapSvg.setAttribute('style', 'display: none;');
this.spriteMapSvg.setAttribute('id', 'reconstructed-sprite');
}
};
distributeMessage = (msg: Message & { tabId: string }): void => {
// @ts-ignore placeholder msg for timestamps
if (msg.tp === 9999) return;
if (msg.tp === MType.SetNodeAttribute) {
if (msg.value.includes('_$OPENREPLAY_SPRITE$_')) {
this.createSpriteMap();
if (!this.domParser) {
return console.error('DOM parser is not initialized?');
}
handleSprites(
this.potentialSpriteMap,
this.domParser,
msg,
this.spriteMapSvg!,
);
}
}
if (!this.tabs[msg.tabId]) {
this.tabsAmount++;
this.state.update({
@ -355,6 +406,9 @@ export default class MessageManager {
this.sessionStart,
this.initialLists,
);
if (this.virtualMode) {
this.tabs[msg.tabId].setVirtualMode(this.virtualMode);
}
}
const lastMessageTime = Math.max(msg.time, this.lastMessageTime);
@ -452,3 +506,36 @@ function mapTabs(tabs: Record<string, TabSessionManager>) {
return tabMap;
}
function handleSprites(
potentialSpriteMap: Record<string, any>,
parser: DOMParser,
msg: Record<string, any>,
spriteMapSvg: SVGElement,
) {
const [_, svgData] = msg.value.split('_$OPENREPLAY_SPRITE$_');
const potentialSprite = potentialSpriteMap[svgData];
if (potentialSprite) {
msg.value = potentialSprite;
} else {
const svgDoc = parser.parseFromString(svgData, 'image/svg+xml');
const originalSvg = svgDoc.querySelector('svg');
if (originalSvg) {
const symbol = document.createElementNS(
'http://www.w3.org/2000/svg',
'symbol',
);
const symbolId = `symbol-${msg.id || `ind-${msg.time}`}`; // Generate an ID if missing
symbol.setAttribute('id', symbolId);
symbol.setAttribute(
'viewBox',
originalSvg.getAttribute('viewBox') || '0 0 24 24',
);
symbol.innerHTML = originalSvg.innerHTML;
spriteMapSvg.appendChild(symbol);
msg.value = `#${symbolId}`;
potentialSpriteMap[svgData] = `#${symbolId}`;
}
}
}

View file

@ -98,6 +98,8 @@ export default class TabSessionManager {
private readonly state: Store<{
tabStates: { [tabId: string]: TabState };
tabNames: { [tabId: string]: string };
location?: string;
vModeBadge?: boolean;
}>,
private readonly screen: Screen,
private readonly id: string,
@ -115,6 +117,13 @@ export default class TabSessionManager {
screen,
this.session.isMobile,
this.setCSSLoading,
() => {
setTimeout(() => {
this.state.update({
vModeBadge: true,
})
}, 0)
}
);
this.lists = new Lists(initialLists);
initialLists?.event?.forEach((e: Record<string, string>) => {
@ -125,6 +134,10 @@ export default class TabSessionManager {
});
}
public setVirtualMode = (virtualMode: boolean) => {
this.pagesManager.setVirtualMode(virtualMode);
};
setSession = (session: any) => {
this.session = session;
};
@ -347,19 +360,19 @@ export default class TabSessionManager {
break;
case MType.CreateTextNode:
case MType.CreateElementNode:
this.windowNodeCounter.addNode(msg.id, msg.parentID);
this.windowNodeCounter.addNode(msg);
this.performanceTrackManager.setCurrentNodesCount(
this.windowNodeCounter.count,
);
break;
case MType.MoveNode:
this.windowNodeCounter.moveNode(msg.id, msg.parentID);
this.windowNodeCounter.moveNode(msg);
this.performanceTrackManager.setCurrentNodesCount(
this.windowNodeCounter.count,
);
break;
case MType.RemoveNode:
this.windowNodeCounter.removeNode(msg.id);
this.windowNodeCounter.removeNode(msg);
this.performanceTrackManager.setCurrentNodesCount(
this.windowNodeCounter.count,
);
@ -415,14 +428,16 @@ export default class TabSessionManager {
}
}
/* === */
const lastLocationMsg = this.locationManager.moveGetLast(t, index);
const lastLocationMsg = this.locationManager.moveGetLast(t, index, true);
if (lastLocationMsg) {
const { tabNames } = this.state.get();
if (lastLocationMsg.documentTitle) {
tabNames[this.id] = lastLocationMsg.documentTitle;
const { tabNames, location } = this.state.get();
if (location !== lastLocationMsg.url) {
if (lastLocationMsg.documentTitle) {
tabNames[this.id] = lastLocationMsg.documentTitle;
}
// @ts-ignore comes from parent state
this.state.update({ location: lastLocationMsg.url, tabNames });
}
// @ts-ignore comes from parent state
this.state.update({ location: lastLocationMsg.url, tabNames });
}
const lastPerformanceTrackMessage =

View file

@ -43,6 +43,7 @@ export default class WebLivePlayer extends WebPlayer {
wpState,
(id) => this.messageManager.getNode(id),
agentId,
this.messageManager.updateSpriteMap,
uiErrorHandler,
);
this.assistManager.connect(session.agentToken!, agentId, projectId);

View file

@ -21,15 +21,10 @@ export default class WebPlayer extends Player {
inspectorMode: false,
mobsFetched: false,
};
private inspectorController: InspectorController;
protected screen: Screen;
protected readonly messageManager: MessageManager;
protected readonly messageLoader: MessageLoader;
private targetMarker: TargetMarker;
constructor(
@ -100,6 +95,12 @@ export default class WebPlayer extends Player {
// @ts-ignore
window.playerJumpToTime = this.jump.bind(this);
// @ts-ignore
window.__OPENREPLAY_DEV_TOOLS__.player = this;
}
enableVMode = () => {
this.messageManager.setVirtualMode(true);
}
preloadFirstFile(data: Uint8Array, fileKey?: string) {
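Together with the VIRTUAL_MODE_KEY flag read by SessionSettings and MessageManager above, this gives two entry points into virtual mode. A hedged console sketch (it assumes window.__OPENREPLAY_DEV_TOOLS__ is initialized elsewhere; only the player assignment is visible in this diff):

// From the browser devtools console, for the currently open replay:
window.__OPENREPLAY_DEV_TOOLS__?.player?.enableVMode();
// Persisting the choice goes through the VIRTUAL_MODE_KEY localStorage flag
// that SessionSettings and MessageManager read on construction.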

View file

@ -140,11 +140,16 @@ class SimpleHeatmap {
ctx.drawImage(this.circle, p[0] - this.r, p[1] - this.r);
});
const colored = ctx.getImageData(0, 0, this.width, this.height);
this.colorize(colored.data, this.grad);
ctx.putImageData(colored, 0, 0);
return this;
try {
const colored = ctx.getImageData(0, 0, this.width, this.height);
this.colorize(colored.data, this.grad);
ctx.putImageData(colored, 0, 0);
} catch (e) {
// usually happens if session is corrupted ?
console.error('Error while colorizing heatmap:', e);
} finally {
return this;
}
}
private colorize(

View file

@ -3,14 +3,14 @@ import type { Socket } from 'socket.io-client';
import type { PlayerMsg, Store } from 'App/player';
import CanvasReceiver from 'Player/web/assist/CanvasReceiver';
import { gunzipSync } from 'fflate';
import { Message } from '../messages';
import { Message, MType } from '../messages';
import type Screen from '../Screen/Screen';
import MStreamReader from '../messages/MStreamReader';
import JSONRawMessageReader from '../messages/JSONRawMessageReader';
import Call, { CallingState } from './Call';
import RemoteControl, { RemoteControlStatus } from './RemoteControl';
import ScreenRecording, { SessionRecordingStatus } from './ScreenRecording';
import { debounceCall } from 'App/utils'
export { RemoteControlStatus, SessionRecordingStatus, CallingState };
export enum ConnectionStatus {
@ -82,6 +82,7 @@ export default class AssistManager {
private store: Store<typeof AssistManager.INITIAL_STATE>,
private getNode: MessageManager['getNode'],
public readonly agentId: number,
private readonly updateSpriteMap: () => void,
public readonly uiErrorHandler?: {
error: (msg: string) => void;
},
@ -200,6 +201,7 @@ export default class AssistManager {
peerId: this.peerID,
query: document.location.search,
}),
config: JSON.stringify(this.getIceServers()),
},
}));
@ -239,6 +241,11 @@ export default class AssistManager {
msg !== null;
msg = reader.readNext()
) {
if (msg.tp === MType.SetNodeAttribute) {
if (msg.value.includes('_$OPENREPLAY_SPRITE$_')) {
debounceCall(this.updateSpriteMap, 250)()
}
}
this.handleMessage(msg, msg._index);
}
};
@ -314,7 +321,7 @@ export default class AssistManager {
this.callManager = new Call(
this.store,
socket,
this.config,
this.getIceServers(),
this.peerID,
this.getAssistVersion,
{
@ -341,7 +348,7 @@ export default class AssistManager {
);
this.canvasReceiver = new CanvasReceiver(
this.peerID,
this.config,
this.getIceServers(),
this.getNode,
{
...this.session.agentInfo,
@ -354,6 +361,23 @@ export default class AssistManager {
});
}
private getIceServers = () => {
if (this.config) {
return this.config;
}
return [
{
urls: [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun3.l.google.com:19302',
'stun:stun4.l.google.com:19302',
],
},
] as RTCIceServer[];
};
/**
* Sends event ping to stats service
* */
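debounceCall comes from App/utils and only its call site appears here (debounceCall(this.updateSpriteMap, 250)() inside the message loop). A minimal sketch of a helper compatible with that call shape; keying it by the target function so repeated calls in the loop coalesce is an assumption about how the real utility behaves:

// Hypothetical sketch, not the actual App/utils helper.
const pendingTimers = new WeakMap<Function, ReturnType<typeof setTimeout>>();
export function debounceCall<T extends (...args: any[]) => void>(fn: T, waitMs: number) {
  return (...args: Parameters<T>) => {
    const prev = pendingTimers.get(fn);
    if (prev) clearTimeout(prev);
    pendingTimers.set(fn, setTimeout(() => fn(...args), waitMs));
  };
}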

View file

@ -43,7 +43,7 @@ export default class Call {
constructor(
private store: Store<State & { tabs: Set<string> }>,
private socket: Socket,
private config: RTCIceServer[] | null,
private config: RTCIceServer[],
private peerID: string,
private getAssistVersion: () => number,
private agent: Record<string, any>,
@ -146,7 +146,7 @@ export default class Call {
// create pc with ice config
const pc = new RTCPeerConnection({
iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
iceServers: this.config,
});
// If there is a local stream, add its tracks to the connection
@ -185,8 +185,7 @@ export default class Call {
pc.ontrack = (event) => {
const stream = event.streams[0];
if (stream && !this.videoStreams[remotePeerId]) {
const clonnedStream = stream.clone();
this.videoStreams[remotePeerId] = clonnedStream.getVideoTracks()[0];
this.videoStreams[remotePeerId] = stream.getVideoTracks()[0];
if (this.store.get().calling !== CallingState.OnCall) {
this.store.update({ calling: CallingState.OnCall });
}
@ -305,22 +304,18 @@ export default class Call {
}
try {
// if the connection is not established yet, then set remoteDescription to peer
if (!pc.localDescription) {
await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
if (isAgent) {
this.socket.emit('WEBRTC_AGENT_CALL', {
from: this.callID,
answer,
toAgentId: getSocketIdByCallId(fromCallId),
type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
});
} else {
this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
}
await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
if (isAgent) {
this.socket.emit('WEBRTC_AGENT_CALL', {
from: this.callID,
answer,
toAgentId: getSocketIdByCallId(fromCallId),
type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
});
} else {
logger.warn('Skipping setRemoteDescription: Already in stable state');
this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
}
} catch (e) {
logger.error('Error setting remote description from answer', e);
@ -370,10 +365,7 @@ export default class Call {
const pc = this.connections[callId];
if (!pc) return;
// if there are ice candidates then add candidate to peer
if (
data.candidate &&
(data.candidate.sdpMid || data.candidate.sdpMLineIndex !== null)
) {
if (data.candidate) {
try {
await pc.addIceCandidate(new RTCIceCandidate(data.candidate));
} catch (e) {
@ -388,13 +380,13 @@ export default class Call {
private handleCallEnd() {
// If the call is not completed, then call onCallEnd
if (this.store.get().calling !== CallingState.NoCall) {
this.callArgs && this.callArgs.onCallEnd();
this.callArgs && this.callArgs.onRemoteCallEnd();
}
// change state to NoCall
this.store.update({ calling: CallingState.NoCall });
// Close all created RTCPeerConnection
Object.values(this.connections).forEach((pc) => pc.close());
this.callArgs?.onCallEnd();
this.callArgs?.onRemoteCallEnd();
// Clear connections
this.connections = {};
this.callArgs = null;
@ -414,7 +406,7 @@ export default class Call {
// Close all connections and reset callArgs
Object.values(this.connections).forEach((pc) => pc.close());
this.connections = {};
this.callArgs?.onCallEnd();
this.callArgs?.onRemoteCallEnd();
this.store.update({ calling: CallingState.NoCall });
this.callArgs = null;
} else {
@ -443,7 +435,8 @@ export default class Call {
private callArgs: {
localStream: LocalStream;
onStream: (s: MediaStream, isAgent: boolean) => void;
onCallEnd: () => void;
onRemoteCallEnd: () => void;
onLocalCallEnd: () => void;
onReject: () => void;
onError?: (arg?: any) => void;
} | null = null;
@ -451,14 +444,16 @@ export default class Call {
setCallArgs(
localStream: LocalStream,
onStream: (s: MediaStream, isAgent: boolean) => void,
onCallEnd: () => void,
onRemoteCallEnd: () => void,
onLocalCallEnd: () => void,
onReject: () => void,
onError?: (e?: any) => void,
) {
this.callArgs = {
localStream,
onStream,
onCallEnd,
onRemoteCallEnd,
onLocalCallEnd,
onReject,
onError,
};
@ -549,7 +544,7 @@ export default class Call {
void this.initiateCallEnd();
Object.values(this.connections).forEach((pc) => pc.close());
this.connections = {};
this.callArgs?.onCallEnd();
this.callArgs?.onLocalCallEnd();
}
}
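Callers of setCallArgs now distinguish who ended the call. A hedged usage sketch; the handler bodies (attachStream, notify, cleanupLocalUi) are placeholders, not code from this diff:

// Illustrative only.
call.setCallArgs(
  localStream,
  (stream, isAgent) => attachStream(stream, isAgent), // onStream
  () => notify('Call ended by the remote side'),      // onRemoteCallEnd
  () => cleanupLocalUi(),                             // onLocalCallEnd
  () => notify('Call rejected'),                      // onReject
  (e) => console.error('call error', e),              // onError
);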

View file

@ -2,21 +2,7 @@ import logger from '@/logger';
import { VElement } from 'Player/web/managers/DOM/VirtualDOM';
import MessageManager from 'Player/web/MessageManager';
import { Socket } from 'socket.io-client';
let frameCounter = 0;
function draw(
video: HTMLVideoElement,
canvas: HTMLCanvasElement,
canvasCtx: CanvasRenderingContext2D,
) {
if (frameCounter % 4 === 0) {
canvasCtx.drawImage(video, 0, 0, canvas.width, canvas.height);
}
frameCounter++;
requestAnimationFrame(() => draw(video, canvas, canvasCtx));
}
import { toast } from 'react-toastify';
export default class CanvasReceiver {
private streams: Map<string, MediaStream> = new Map();
@ -25,10 +11,20 @@ export default class CanvasReceiver {
private cId: string;
private frameCounter = 0;
private canvasesData = new Map<
string,
{
video: HTMLVideoElement;
canvas: HTMLCanvasElement;
canvasCtx: CanvasRenderingContext2D;
}
>(new Map());
// sendSignal for sending signals (offer/answer/ICE)
constructor(
private readonly peerIdPrefix: string,
private readonly config: RTCIceServer[] | null,
private readonly config: RTCIceServer[],
private readonly getNode: MessageManager['getNode'],
private readonly agentInfo: Record<string, any>,
private readonly socket: Socket,
@ -56,6 +52,14 @@ export default class CanvasReceiver {
},
);
this.socket.on('webrtc_canvas_stop', (data: { id: string }) => {
const { id } = data;
const canvasId = getCanvasId(id);
this.connections.delete(id);
this.streams.delete(id);
this.canvasesData.delete(canvasId);
});
this.socket.on('webrtc_canvas_restart', () => {
this.clear();
});
@ -66,9 +70,7 @@ export default class CanvasReceiver {
id: string,
): Promise<void> {
const pc = new RTCPeerConnection({
iceServers: this.config
? this.config
: [{ urls: 'stun:stun.l.google.com:19302' }],
iceServers: this.config,
});
// Save the connection
@ -87,7 +89,7 @@ export default class CanvasReceiver {
const stream = event.streams[0];
if (stream) {
// Detect canvasId from remote peer id
const canvasId = id.split('-')[4];
const canvasId = getCanvasId(id);
this.streams.set(canvasId, stream);
setTimeout(() => {
const node = this.getNode(parseInt(canvasId, 10));
@ -95,14 +97,15 @@ export default class CanvasReceiver {
stream.clone() as MediaStream,
node as VElement,
);
if (node) {
draw(
videoEl,
node.node as HTMLCanvasElement,
(node.node as HTMLCanvasElement).getContext(
if (node && videoEl) {
this.canvasesData.set(canvasId, {
video: videoEl,
canvas: node.node as HTMLCanvasElement,
canvasCtx: (node.node as HTMLCanvasElement)?.getContext(
'2d',
) as CanvasRenderingContext2D,
);
});
this.draw();
} else {
logger.log('NODE', canvasId, 'IS NOT FOUND');
}
@ -138,7 +141,27 @@ export default class CanvasReceiver {
});
this.connections.clear();
this.streams.clear();
this.canvasesData.clear();
}
draw = () => {
if (this.frameCounter % 4 === 0) {
if (this.canvasesData.size === 0) {
return;
}
this.canvasesData.forEach((canvasData, id) => {
const { video, canvas, canvasCtx } = canvasData;
const node = this.getNode(parseInt(id, 10));
if (node) {
canvasCtx.drawImage(video, 0, 0, canvas.width, canvas.height);
} else {
this.canvasesData.delete(id);
}
});
}
this.frameCounter++;
requestAnimationFrame(() => this.draw());
};
}
function spawnVideo(stream: MediaStream, node: VElement) {
@ -154,6 +177,10 @@ function spawnVideo(stream: MediaStream, node: VElement) {
.play()
.then(() => true)
.catch(() => {
toast.error('Click to unpause canvas stream', {
autoClose: false,
toastId: 'canvas-stream',
});
// we allow that if user just reloaded the page
});
@ -166,6 +193,10 @@ function spawnVideo(stream: MediaStream, node: VElement) {
const startStream = () => {
videoEl
.play()
.then(() => {
toast.dismiss('canvas-stream');
clearListeners();
})
.then(() => console.log('unpaused'))
.catch(() => {
// we allow that if user just reloaded the page
@ -177,48 +208,14 @@ function spawnVideo(stream: MediaStream, node: VElement) {
return videoEl;
}
function spawnDebugVideo(stream: MediaStream, node: VElement) {
const video = document.createElement('video');
video.id = 'canvas-or-testing';
video.style.border = '1px solid red';
video.setAttribute('autoplay', 'true');
video.setAttribute('muted', 'true');
video.setAttribute('playsinline', 'true');
video.setAttribute('crossorigin', 'anonymous');
const coords = node.node.getBoundingClientRect();
Object.assign(video.style, {
position: 'absolute',
left: `${coords.left}px`,
top: `${coords.top}px`,
width: `${coords.width}px`,
height: `${coords.height}px`,
});
video.width = coords.width;
video.height = coords.height;
video.srcObject = stream;
document.body.appendChild(video);
video
.play()
.then(() => {
console.debug('started streaming canvas');
})
.catch((e) => {
console.error(e);
const waiter = () => {
void video.play();
document.removeEventListener('click', waiter);
};
document.addEventListener('click', waiter);
});
}
function checkId(id: string, cId: string): boolean {
return id.includes(cId);
}
function getCanvasId(id: string): string {
return id.split('-')[4];
}
/** simple peer example
* // @ts-ignore
* const peer = new SLPeer({ initiator: false })

View file

@ -17,6 +17,9 @@ export interface State {
export default class RemoteControl {
private assistVersion = 1;
private isDragging = false;
private dragStart: any | null = null;
private readonly dragThreshold = 3;
static readonly INITIAL_STATE: Readonly<State> = {
remoteControl: RemoteControlStatus.Disabled,
@ -81,6 +84,7 @@ export default class RemoteControl {
}
private onMouseMove = (e: MouseEvent): void => {
if (this.isDragging) return;
const data = this.screen.getInternalCoordinates(e);
this.emitData('move', [data.x, data.y]);
};
@ -154,16 +158,61 @@ export default class RemoteControl {
this.emitData('click', [data.x, data.y]);
};
private onMouseDown = (e: MouseEvent): void => {
if (this.store.get().annotating) return;
const { x, y } = this.screen.getInternalViewportCoordinates(e);
this.dragStart = [x, y];
this.isDragging = false;
const handleMove = (moveEvent: MouseEvent) => {
const { x: mx, y: my } =
this.screen.getInternalViewportCoordinates(moveEvent);
const [sx, sy] = this.dragStart!;
const dx = Math.abs(mx - sx);
const dy = Math.abs(my - sy);
if (
!this.isDragging &&
(dx > this.dragThreshold || dy > this.dragThreshold)
) {
this.emitData('startDrag', [sx, sy]);
this.isDragging = true;
}
if (this.isDragging) {
this.emitData('drag', [mx, my, mx - sx, my - sy]);
}
};
const handleUp = () => {
if (this.isDragging) {
this.emitData('stopDrag');
}
this.dragStart = null;
this.isDragging = false;
window.removeEventListener('mousemove', handleMove);
window.removeEventListener('mouseup', handleUp);
};
window.addEventListener('mousemove', handleMove);
window.addEventListener('mouseup', handleUp);
};
private toggleRemoteControl(enable: boolean) {
if (enable) {
this.screen.overlay.addEventListener('mousemove', this.onMouseMove);
this.screen.overlay.addEventListener('click', this.onMouseClick);
this.screen.overlay.addEventListener('wheel', this.onWheel);
this.screen.overlay.addEventListener('mousedown', this.onMouseDown);
this.store.update({ remoteControl: RemoteControlStatus.Enabled });
} else {
this.screen.overlay.removeEventListener('mousemove', this.onMouseMove);
this.screen.overlay.removeEventListener('click', this.onMouseClick);
this.screen.overlay.removeEventListener('wheel', this.onWheel);
this.screen.overlay.removeEventListener('mousedown', this.onMouseDown);
this.store.update({ remoteControl: RemoteControlStatus.Disabled });
this.toggleAnnotation(false);
}

View file

@ -44,45 +44,34 @@ const ATTR_NAME_REGEXP = /([^\t\n\f \/>"'=]+)/;
export default class DOMManager extends ListWalker<Message> {
private readonly vTexts: Map<number, VText> = new Map(); // map vs object here?
private readonly vElements: Map<number, VElement> = new Map();
private readonly olVRoots: Map<number, OnloadVRoot> = new Map();
/** required to keep track of iframes, frameId : vnodeId */
private readonly iframeRoots: Record<number, number> = {};
private shadowRootParentMap: Map<number, number> = new Map();
/** Constructed StyleSheets https://developer.mozilla.org/en-US/docs/Web/API/Document/adoptedStyleSheets
* as well as <style> tag owned StyleSheets
*/
private olStyleSheets: Map<number, OnloadStyleSheet> = new Map();
/** @depreacted since tracker 4.0.2 Mapping by nodeID */
private olStyleSheetsDeprecated: Map<number, OnloadStyleSheet> = new Map();
private upperBodyId: number = -1;
private nodeScrollManagers: Map<number, ListWalker<SetNodeScroll>> =
new Map();
private stylesManager: StylesManager;
private focusManager: FocusManager = new FocusManager(this.vElements);
private selectionManager: SelectionManager;
private readonly screen: Screen;
private readonly isMobile: boolean;
private readonly stringDict: Record<number, string>;
private readonly globalDict: {
get: (key: string) => string | undefined;
all: () => Record<string, string>;
};
public readonly time: number;
private virtualMode = false;
private hasSlots = false
private showVModeBadge?: () => void;
constructor(params: {
screen: Screen;
@ -94,6 +83,8 @@ export default class DOMManager extends ListWalker<Message> {
get: (key: string) => string | undefined;
all: () => Record<string, string>;
};
virtualMode?: boolean;
showVModeBadge?: () => void;
}) {
super();
this.screen = params.screen;
@ -103,6 +94,8 @@ export default class DOMManager extends ListWalker<Message> {
this.globalDict = params.globalDict;
this.selectionManager = new SelectionManager(this.vElements, params.screen);
this.stylesManager = new StylesManager(params.screen, params.setCssLoading);
this.virtualMode = params.virtualMode || false;
this.showVModeBadge = params.showVModeBadge;
setupWindowLogging(this.vTexts, this.vElements, this.olVRoots);
}
@ -163,6 +156,11 @@ export default class DOMManager extends ListWalker<Message> {
}
public getNode(id: number) {
const mappedId = this.shadowRootParentMap.get(id);
if (mappedId !== undefined) {
// If this is a shadow root ID, return the parent element instead
return this.vElements.get(mappedId);
}
return this.vElements.get(id) || this.vTexts.get(id);
}
@ -171,24 +169,21 @@ export default class DOMManager extends ListWalker<Message> {
id: number;
index: number;
}): void {
const { parentID, id, index } = msg;
let { parentID, id, index } = msg;
// Check if parentID is a shadow root, and get the real parent element if so
const actualParentID = this.shadowRootParentMap.get(parentID);
if (actualParentID !== undefined) {
parentID = actualParentID;
}
const child = this.vElements.get(id) || this.vTexts.get(id);
if (!child) {
logger.error('Insert error. Node not found', id);
return;
}
const parent = this.vElements.get(parentID) || this.olVRoots.get(parentID);
if ('tagName' in child && child.tagName === 'BODY') {
const spriteMap = new VSpriteMap(
'svg',
true,
Number.MAX_SAFE_INTEGER - 100,
Number.MAX_SAFE_INTEGER - 100,
);
spriteMap.node.setAttribute('id', 'OPENREPLAY_SPRITES_MAP');
spriteMap.node.setAttribute('style', 'display: none;');
child.insertChildAt(spriteMap, Number.MAX_SAFE_INTEGER - 100);
}
if (!parent) {
logger.error(
`${id} Insert error. Parent vNode ${parentID} not found`,
@ -303,11 +298,19 @@ export default class DOMManager extends ListWalker<Message> {
this.insertNode(msg);
this.removeBodyScroll(msg.id, vElem);
this.removeAutocomplete(vElem);
if (msg.tag === 'SLOT') {
this.hasSlots = true;
}
return;
}
case MType.MoveNode:
case MType.MoveNode: {
// if the parent ID is in shadow root map -> custom elements case
if (this.shadowRootParentMap.has(msg.parentID)) {
msg.parentID = this.shadowRootParentMap.get(msg.parentID)!;
}
this.insertNode(msg);
return;
}
case MType.RemoveNode: {
const vChild = this.vElements.get(msg.id) || this.vTexts.get(msg.id);
if (!vChild) {
@ -440,6 +443,21 @@ export default class DOMManager extends ListWalker<Message> {
logger.error('CreateIFrameDocument: Node not found', msg);
return;
}
// shadow DOM for a custom element + SALESFORCE (<slot>)
const isCustomElement =
vElem.tagName.includes('-') || vElem.tagName === 'SLOT';
if (isCustomElement) {
if (this.virtualMode) {
// Store the mapping but don't create the actual shadow root
this.shadowRootParentMap.set(msg.id, msg.frameID);
return;
} else if (this.hasSlots) {
this.showVModeBadge?.();
}
}
// Real iframes
if (this.iframeRoots[msg.frameID] && !this.olVRoots.has(msg.id)) {
this.olVRoots.delete(this.iframeRoots[msg.frameID]);
}
@ -452,7 +470,11 @@ export default class DOMManager extends ListWalker<Message> {
case MType.AdoptedSsInsertRule: {
const styleSheet = this.olStyleSheets.get(msg.sheetID);
if (!styleSheet) {
logger.warn('No stylesheet was created for ', msg);
logger.warn(
'No stylesheet was created for ',
msg,
this.olStyleSheets,
);
return;
}
insertRule(styleSheet, msg);
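The shadow-root handling above can be read as a small ID-remapping scheme. A standalone illustration under the same message shapes (a sketch of the idea, not the DOMManager code itself):

// Sketch only: when virtual mode skips building a real shadow root for a
// custom element (tag contains '-') or SLOT, remember which host owns that id.
const shadowRootParentMap = new Map<number, number>();

function registerShadowRoot(rootId: number, hostId: number, hostTag: string, virtualMode: boolean) {
  const isCustomElement = hostTag.includes('-') || hostTag === 'SLOT';
  if (isCustomElement && virtualMode) shadowRootParentMap.set(rootId, hostId);
}

// Later inserts, moves and lookups that reference the skipped root are
// redirected to the host element instead.
function resolveParent(parentID: number): number {
  return shadowRootParentMap.get(parentID) ?? parentID;
}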

Some files were not shown because too many files have changed in this diff.