Compare commits

903 commits

Author SHA1 Message Date
rjshrjndrn
68050f183f chore(dashboards): backend dashboard update
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-13 16:38:12 +02:00
rjshrjndrn
3530fbccb8 chore(dashboards): Adding more metrics to backend
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-12 14:59:33 +02:00
rjshrjndrn
e5c37cb0f2 docs(observability): How to install
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-12 13:16:10 +02:00
rjshrjndrn
0ee52dd72a chore(dashboard): updating components dashboard
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-12 13:12:44 +02:00
rjshrjndrn
ff0d473a15 chore(helm): updating backend dashboard
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-11 13:50:14 +02:00
rjshrjndrn
e4953d649e chore(monitoring): msk dashboard update
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-11 13:33:31 +02:00
rjshrjndrn
6812102061 chore(helm): Adding default storage provider for prometheus
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-11 13:05:56 +02:00
rjshrjndrn
fa173eeb4f chore(monitorng): Adding time estimation for msk lag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-07-08 12:02:10 +02:00
rjshrjndrn
f5944566da chore(dashboard): Adding clickhouse dashboard
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 16:26:20 +02:00
rjshrjndrn
7507e28d80 chore(dashboards): Adding msk charts
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 14:22:35 +02:00
rjshrjndrn
fd8a770789 chore(helm): dynamic dashboard creation
Ref: https://github.com/helm/helm/issues/4157#issuecomment-490748085

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 14:17:45 +02:00
rjshrjndrn
c971c0ff05 chore(dashboards): nginx grafana dashboard
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 13:46:06 +02:00
rjshrjndrn
cc3ca56902 chore(helm): overriding fullName
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 13:31:04 +02:00
rjshrjndrn
df736cc840 fix(helm): fix namespace
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 13:12:12 +02:00
rjshrjndrn
06d568a96e fix(helm): value override
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 12:54:11 +02:00
rjshrjndrn
67022f538b chore(helm): Adding observability chart
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 12:32:58 +02:00
rjshrjndrn
d7e100e383 chore(helm): Adding grafana plugins
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 08:12:26 +02:00
rjshrjndrn
a5bc7a8f87 chore(dashboard): updated openreplay components dashboard
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 07:46:44 +02:00
rjshrjndrn
6eb15fa1cb chore(monitoring): updated variable format.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 07:21:09 +02:00
rjshrjndrn
b1171d321b chore(dashboards): openreplay-components
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-30 06:59:05 +02:00
rjshrjndrn
7e6d4b5e2b chore(dashboards): Adding ngin-ingress dashboards.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-29 19:12:09 +02:00
rjshrjndrn
0100684faa chore(helm): nginx-ingress-controller enabled metrics
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-29 19:04:24 +02:00
rjshrjndrn
4971d5ff25 chore(logging): Adding promtail config 2022-06-29 18:36:22 +02:00
rjshrjndrn
73902a73ef chore(helm): Update configs
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-29 18:28:10 +02:00
rjshrjndrn
d6e03aad52 chore(monitoring): Adding enterprise config
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-29 15:20:34 +02:00
rjshrjndrn
99ee5d5cb1 ci(dbmigrate): Create db migrate when there is change
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-29 12:25:21 +02:00
Alexander
1c1887f657
New configuration module (#558) 2022-06-29 12:20:42 +02:00
Taha Yassine Kraiem
831d90cb94 Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-28 20:44:08 +02:00
Taha Yassine Kraiem
6b292a3ae7 feat(api): fixed assist error response 2022-06-28 20:43:00 +02:00
Taha Yassine Kraiem
4b0f3e1ffc feat(api): api-v1 handle wrong projectKey 2022-06-28 20:34:47 +02:00
Taha Yassine Kraiem
0bcfbedfd2 feat(api): api-v1 fixed search live sessions 2022-06-28 20:32:23 +02:00
Shekar Siri
b005d4dd31 change(ui) - show a message when mob file not found 2022-06-28 19:42:47 +02:00
Shekar Siri
497fae023b fix(ui) - audit trail date range custom picker alignment 2022-06-28 19:42:47 +02:00
Shekar Siri
ce09f5d54f change(ui) - audit trail count with comma 2022-06-28 19:42:47 +02:00
Shekar Siri
c78fb78927 change(ui) - show role edit on hover 2022-06-28 19:42:47 +02:00
Taha Yassine Kraiem
20644140e2 Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-28 19:17:13 +02:00
Taha Yassine Kraiem
48fdd9fabc feat(api): api-v1 handle wrong projectKey
feat(api): api-v1 get live sessions
2022-06-28 19:05:40 +02:00
Shekar Siri
6e7a2f2472 change(ui) - show installation btn without mouse hover 2022-06-28 18:37:43 +02:00
Shekar Siri
0cb6341988 fix(ui) - redirect fix 2022-06-28 18:06:02 +02:00
Shekar Siri
a46d842c0b change(ui) - non admin user preference restrictions 2022-06-28 18:06:02 +02:00
sylenien
c8416e2c0c fix(ui): fix share popup styles 2022-06-28 17:55:47 +02:00
Shekar Siri
e9482d1629 change(ui) - redirect to the landing url on SSO login 2022-06-28 17:27:56 +02:00
sylenien
f3052d1ad0 fix(ui): fix typo 2022-06-28 16:44:34 +02:00
sylenien
08f9e3965e fix(ui): fix metric tables height and button placing 2022-06-28 15:25:54 +02:00
rjshrjndrn
198ea005d4 chore(helm): override branch name for db migration
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-28 15:14:05 +02:00
sylenien
4d22156b6a fix(ui): default tz fix 2022-06-28 15:05:26 +02:00
rjshrjndrn
7f9ca9ef18 ci(helm): skipping hooks for ci installation 2022-06-28 14:44:28 +02:00
Shekar Siri
1ed30b35d7 change(ui) - error widget border 2022-06-28 14:40:56 +02:00
sylenien
63f77c0c3e fix(ui): fix capture rate 2022-06-28 14:31:03 +02:00
rjshrjndrn
dd6d8ec566 ci(helm): skip hooks
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-28 14:30:27 +02:00
Shekar Siri
0e31fe43af change(ui) - hide back button based on url query param iframe=true 2022-06-28 14:29:31 +02:00
Shekar Siri
c44bb5da79 fix(ui) - disable body scroll on modal open 2022-06-28 14:09:43 +02:00
sylenien
41e093312a fix(ui): fix bookmarking 2022-06-28 13:29:07 +02:00
sylenien
09cde2e5ec fix(ui): fix for timezone storage and format 2022-06-28 12:40:27 +02:00
Shekar Siri
2d22bae2ff change(ui) - modalprovider 2022-06-28 12:30:47 +02:00
sylenien
773bfc7995 fix(ui): fix breadcrumbs chevron icon 2022-06-28 11:19:12 +02:00
sylenien
e5a73ada4f fix(ui): fix textelipsis comp 2022-06-28 10:51:48 +02:00
sylenien
86a6aa6c07 fix(ui): fix bookmarking/vaulting 2022-06-28 09:24:42 +02:00
sylenien
bf80997c0c fix(tracker): fix peer hack for better build support 2022-06-28 09:20:11 +02:00
sylenien
a03b441f97 fix(tracker): fix assist import in order to prevent fails with next imports 2022-06-28 09:20:11 +02:00
Shekar Siri
7022faa5eb change(ui) - assist list loader
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 20:40:56 +02:00
Shekar Siri
110f215e7d fix(ui) - lazyload loader 2022-06-27 20:03:11 +02:00
Taha Yassine Kraiem
a2588df4cc feat(api): changed build logic 2022-06-27 19:55:09 +02:00
Taha Yassine Kraiem
747487cc4c Merge remote-tracking branch 'origin/dev' into api-v1.7.0 2022-06-27 19:50:32 +02:00
rjshrjndrn
b064732c01 ci(fix): resetting vars file
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 19:21:17 +02:00
rjshrjndrn
fa815a7cb6 ci(fix): cleaning old assets
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 18:58:43 +02:00
Shekar Siri
c26e715b5d fix(ui) - API_EDP 2022-06-27 18:32:25 +02:00
rjshrjndrn
e05ba2df47 ci(helm): updated comment
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 18:17:53 +02:00
rjshrjndrn
8ed69347f6 ci(fix): actions pointing to correct cluster
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 17:54:57 +02:00
Shekar Siri
6384bf9e9e fix(ui) - permission check updates 2022-06-27 17:40:28 +02:00
rjshrjndrn
082acccac4 ci(actions): skipping migration in actions
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 17:24:27 +02:00
rjshrjndrn
5019fba5b2 chore(helm): enable skipMigration Flag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 17:16:23 +02:00
rjshrjndrn
cccd97c07a fix(cicd): proper image tag
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 17:16:23 +02:00
Shekar Siri
db6609d908 fix(ui) - duration and not data message for sessions and errors 2022-06-27 16:32:03 +02:00
Shekar Siri
a3e99a6217 fix(ui) - end date fix and other changes 2022-06-27 15:53:11 +02:00
rjshrjndrn
a14dfb4a79 ci(fix): frontend ee deployment
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 15:05:46 +02:00
Shekar Siri
16503a2fae change(ui) - metric type icon in metric list 2022-06-27 14:08:18 +02:00
Shekar Siri
661a4364dc change(ui) - error and sessions border 2022-06-27 14:08:18 +02:00
Shekar Siri
941f8c9b11 change(ui) - removed unused from header component 2022-06-27 14:08:18 +02:00
Shekar Siri
7787f19c00 change(ui) - version changes in env.sample 2022-06-27 14:08:18 +02:00
rjshrjndrn
917ab96723 ci(fix): kubeconfig path
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 14:01:42 +02:00
rjshrjndrn
5d52d56e12 ci(fix): change kubeconfig auth env
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 13:51:09 +02:00
Alexander Zavorotynskiy
3f52992e33 fix(backend): fixed config var name in integrations service 2022-06-27 13:45:11 +02:00
rjshrjndrn
22b3ffdc6d ci(worker): cache disabled as it's consuming space, and actions failing
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 13:41:39 +02:00
rjshrjndrn
f1920a28bf ci(frontend): deploying ee along with oss
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-27 13:34:57 +02:00
sylenien
6592f33827 fix(ui): fix add outside click for modal, fix right menu headers 2022-06-27 12:32:54 +02:00
sylenien
8ae15c799e fix(ui): fix jira integration, fix widget name esc handling, minor fixes 2022-06-27 12:08:09 +02:00
Alexander Zavorotynskiy
95c9b6e3f5 feat(backend): minor fixes after prerelease tests 2022-06-27 10:35:05 +02:00
Shekar Siri
179dbd22d5 fix(ui) - roles and permissions 2022-06-24 20:48:06 +02:00
Taha Yassine Kraiem
20aaff933e Merge remote-tracking branch 'origin/dev' into api-v1.7.0 2022-06-24 20:25:01 +02:00
Taha Yassine Kraiem
3089d02e7d feat(api): fixed invite user 2022-06-24 20:23:19 +02:00
Taha Yassine Kraiem
7d0a0c998e feat(api): changed /limits 2022-06-24 20:06:37 +02:00
Taha Yassine Kraiem
855830a9a8 feat(api): changed /notifications/count response 2022-06-24 19:51:08 +02:00
Taha Yassine Kraiem
3264a424d1 feat(DB): changed resources primary keys 2022-06-24 19:29:54 +02:00
Taha Yassine Kraiem
66a1a1c0d1 feat(api): fixed /notifications/count 2022-06-24 19:07:33 +02:00
Shekar Siri
399352dd7f change(ui) - funnels checking for min two events 2022-06-24 18:56:39 +02:00
Shekar Siri
fc01ffb6bf change(ui) - sessions search checking for empty fitler values 2022-06-24 18:35:35 +02:00
Shekar Siri
77096976ea fix(ui) - session settings input event 2022-06-24 18:35:35 +02:00
Shekar Siri
783e029ec9 change(ui) - packge lock updates 2022-06-24 18:35:35 +02:00
Shekar Siri
c662f26e38 change(ui) - tailwind version 2022-06-24 18:35:35 +02:00
rjshrjndrn
03fa6c5e22 chore(build): fix script return code
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 18:22:30 +02:00
rjshrjndrn
2c3841e57e chore(local_deploy): Deploy frontend
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 18:17:46 +02:00
sylenien
810e97605b fix(ui): fix for js errors widget styles 2022-06-24 17:20:24 +02:00
Alex Kaminskii
236ac05c92 refactor(backend): use analytics topic for IntegrationEvent 2022-06-24 16:37:51 +02:00
Alex Kaminskii
4e439354c3 style(backend): rename RawErrorEvent to IntegrationEvent 2022-06-24 16:32:46 +02:00
rjshrjndrn
3259c6667a ci(helm): use atomic for deploying
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:11:35 +02:00
rjshrjndrn
e38ad0a7b2 ci(build): moving env sample
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:08:37 +02:00
rjshrjndrn
c1fa34d2ff fix(build): correct file name
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:07:33 +02:00
rjshrjndrn
f6e21ee07e build(frontend): removed unnecessary step
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:07:18 +02:00
rjshrjndrn
01f6c6b54c fix(frontend): build script comment
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:07:18 +02:00
rjshrjndrn
1134fc133c fix(build): docker file priority
Last step is the default one in docker build.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 16:07:18 +02:00
Shekar Siri
c7741ebad8 change(ui) - docker copy env file 2022-06-24 15:51:59 +02:00
sylenien
bb61a4543f fix(ui): rm unused code 2022-06-24 15:43:13 +02:00
sylenien
8f31341881 fix(ui): tweak webpack config 2022-06-24 15:43:13 +02:00
sylenien
61b2c3e32c fix(ui): tweak webpack config 2022-06-24 15:43:13 +02:00
sylenien
6f9a5e71f1 fix(ui): small design review fixes 2022-06-24 15:43:13 +02:00
rjshrjndrn
16bfdc5c9c ci(fix): inject proper env value.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 15:37:47 +02:00
Shekar Siri
7d19b77e94 fix(ui) - no data msg and padding 2022-06-24 15:24:06 +02:00
rjshrjndrn
08bf88b411 build(frontend): Adding docker buildkit support
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 15:06:46 +02:00
rjshrjndrn
08d2375683 ci(actions): enable docker buildkit
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 15:03:19 +02:00
rjshrjndrn
abe4f17bbc ci(fix): spelling
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 14:59:11 +02:00
rjshrjndrn
f17fd33120 chore(build): Creating separate build for cicd
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 14:57:24 +02:00
rjshrjndrn
260d758592 ci(fix): change build
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:57:56 +02:00
rjshrjndrn
aaf42f6157 ci(frontend): remove docker caching
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:52:07 +02:00
rjshrjndrn
e975c07482 ci(frontend): optimizing build for caching
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:49:59 +02:00
rjshrjndrn
bdc3fcf22b ci(frontend): run only the latest build
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:37:44 +02:00
sylenien
7302490444 fix(ui): fix item menu styles 2022-06-24 12:27:03 +02:00
rjshrjndrn
0374f0934a build(frontend): decoupling yarn and build for better caching.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:25:35 +02:00
rjshrjndrn
46b3ec2025 ci(fix): change step name
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:22:51 +02:00
rjshrjndrn
126a7561d8 ci(frontend): removed npm caching from host
As the build is happening in container.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:20:14 +02:00
Alex Kaminskii
015fe57355 refactor(tracker): get rid of instanceof checks in observer (use nodeName and nodeType guards) 2022-06-24 12:17:13 +02:00
rjshrjndrn
febdfd72e3 ci(fix): frontend buld deploy
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:13:50 +02:00
rjshrjndrn
4288da245c ci(frontend): Update deploy to helm chart.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-24 12:00:09 +02:00
Alexander Zavorotynskiy
f77af991d6 feat(backend): disabled iOS heuristics 2022-06-24 11:47:24 +02:00
Alexander Zavorotynskiy
2c0880a161 feat(backend): add user's timestamp to ender logic, removed some messages from db batches 2022-06-24 10:10:35 +02:00
Shekar Siri
3b6cb3ee0e fix(ui) - alert metric check 2022-06-23 19:27:38 +02:00
Shekar Siri
8fafc878eb fix(ui) - login check 2022-06-23 19:21:58 +02:00
Shekar Siri
a63ff8ae12 fix(ui) - widget change detection on route change 2022-06-23 18:59:28 +02:00
Shekar Siri
b22f1488a6 fix(ui) - icon button 2022-06-23 18:59:28 +02:00
Shekar Siri
0034a22fd1 fix(ui) - alert form segment selection 2022-06-23 18:59:28 +02:00
Taha Yassine Kraiem
bee4abeb63 feat(api): EE env-vars override 2022-06-23 18:19:52 +02:00
sylenien
6114254671 fix(ui): fix dashboard widget scroll position on change 2022-06-23 18:01:26 +02:00
Shekar Siri
68b8cc3586 fix(ui) - last item border 2022-06-23 17:59:19 +02:00
Shekar Siri
889a4313c5 fix(ui) - invitation link button 2022-06-23 17:59:19 +02:00
Shekar Siri
a46240feb4 fix(ui) - signup form submit button type 2022-06-23 17:59:19 +02:00
Rajesh Rajendran
7dcd9c99a6
Move frontend as a separate container (#553)
* chore(frontend): build dockerimage for frontend

* chore(helm): remove frontend files from minio.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(helm): Adding frontend chart

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(helm): remove grafana ingress from community charts

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(helm): removing minio-frontend ingress

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(helm): ingress rewrite

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(build): give priority to env image tag

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(frontend): adding nginx.conf for frontend container

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(helm): disable minio if not used

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-23 15:53:18 +00:00
Shekar Siri
c548cd7cbe change(ui) - metric list item name casing 2022-06-23 16:29:19 +02:00
Shekar Siri
7b4a189588 change(ui) - sessions and errors count 2022-06-23 16:26:01 +02:00
Shekar Siri
e1c633af99 change(ui) - project limit check 2022-06-23 16:16:24 +02:00
Shekar Siri
952515b293 change(ui) - remote pull resolve conflicts 2022-06-23 16:04:33 +02:00
Alex Kaminskii
71f5f3a797 fix(backend): remove tab chars in url before parse 2022-06-23 15:56:11 +02:00
sylenien
c2f93c6c42 fix(ui): fix react warning 2022-06-23 15:45:33 +02:00
sylenien
98ebef88c3 fix(ui): role button for timeline 2022-06-23 15:40:25 +02:00
sylenien
47af08e0fe fix(ui): minor ui fixes 2022-06-23 15:37:23 +02:00
sylenien
6f3e66ee46 fix(ui): more ui fixes, typing for router 2022-06-23 15:37:23 +02:00
sylenien
2e51918cc7 fix(ui): typings for iconts, fix for widget name field 2022-06-23 15:37:23 +02:00
sylenien
b55145e580 fix(ui): minor ui fixes 2022-06-23 15:37:23 +02:00
Taha Yassine Kraiem
fa6e8087e1 Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-23 15:00:28 +02:00
Taha Yassine Kraiem
ebc2f809a3 feat(api): changed /limits response 2022-06-23 15:00:10 +02:00
Shekar Siri
f7cf8ac269 change(ui) - global limits 2022-06-23 14:56:01 +02:00
Shekar Siri
c622891299 change(ui) - funnels calls 2022-06-23 14:56:01 +02:00
rjshrjndrn
ece5b482e6 docs(frontend): removed unnecessary code
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-23 14:08:40 +02:00
Alex Kaminskii
eb967919f5 style(tracker): img module - use attributeFilter in observer 2022-06-23 13:13:16 +02:00
Shekar Siri
5d9cc9b7ea change(ui) - errors and sessions widget click 2022-06-23 12:35:50 +02:00
Shekar Siri
0fb37ab695 change(ui) - precentage removed floating 2022-06-23 12:21:09 +02:00
Shekar Siri
ceeeb1ef0c change(ui) - braedcrumb first letter cap 2022-06-23 12:21:09 +02:00
sylenien
5e8b663fb6 fix(tracker): add check for sets 2022-06-23 12:05:21 +02:00
sylenien
7499b05431 fix(tracker): fix srcset tracking 2022-06-23 12:05:21 +02:00
sylenien
3607f45f8a fix(tracker): fix srcset tracking 2022-06-23 12:05:21 +02:00
Shekar Siri
af23769d74 change(ui) - capitalize first letter 2022-06-23 11:59:05 +02:00
Shekar Siri
f96100fea9 Merge branch 'sessions-list' into dev 2022-06-23 11:33:33 +02:00
sylenien
ef935e3ee2 fix(ui): fix timeline icons overlap 2022-06-23 10:03:20 +02:00
Alex Kaminskii
9f7b8aec5b fix(tracker): send metadata on start 2022-06-22 19:48:17 +02:00
Shekar Siri
78363606ad change(ui) - sessions list layout 2022-06-22 19:41:29 +02:00
Shekar Siri
b38fbe1a30 fix(ui) - dropdown fixes 2022-06-22 18:33:32 +02:00
Shekar Siri
d7e680247d fix(ui) - dropdown fixes 2022-06-22 18:28:52 +02:00
Shekar Siri
188e504bb7 fix(ui) - dropdown fixes 2022-06-22 17:59:08 +02:00
Alex Kaminskii
c8ec85c98e style(tracker): type fix 2022-06-22 17:32:29 +02:00
Alex Kaminskii
a4f2191757 style(frontend/player):type fixes 2022-06-22 17:30:00 +02:00
Shekar Siri
47774191b1 fix(ui) - player init 2022-06-22 17:16:09 +02:00
rjshrjndrn
5ff8fc6960 chore(helm): kafka topic size update
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-22 17:12:54 +02:00
sylenien
e07404de22 fix(ui): add more ts aliases 2022-06-22 17:07:09 +02:00
sylenien
aadd7d5418 fix(ui): fix icon build script to rewrite clipPath and clipRule 2022-06-22 17:02:08 +02:00
sylenien
fe14907303 fix(ui): fix icon build script to remove warnings 2022-06-22 16:57:16 +02:00
Delirium
ee373cc0b4
fix(ui): small ui fixes and improvements (#550) 2022-06-22 16:51:53 +02:00
Shekar Siri
0ef43a30fd change(ui) - switch to yern 2022-06-22 16:39:33 +02:00
Shekar Siri
719dab4b8e
Update frontend.yaml 2022-06-22 16:38:34 +02:00
Shekar Siri
3d82678c26 fixed(ui) - review fixes 2022-06-22 16:18:37 +02:00
Taha Yassine Kraiem
84a59710c3 Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-22 16:16:58 +02:00
Taha Yassine Kraiem
6cb997def7 feat(api): changed get session's live flag 2022-06-22 16:16:38 +02:00
Alex Kaminskii
c2e95f8d98 fix(frontend): init player once 2022-06-22 15:56:27 +02:00
Taha Yassine Kraiem
3c47cebd53 Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-22 15:50:20 +02:00
Taha Yassine Kraiem
7ed0d28e40 feat(assist): cleaned extra files
feat(assist): autocomplete return capitalized type
2022-06-22 15:49:59 +02:00
Alex Kaminskii
d4c692b2d4 fix(frontend/player): no inplace operations in loadFiles fn 2022-06-22 15:38:48 +02:00
Alex K
33eca54031
Merge pull request #542 from openreplay/tracker-wworker-writer-bug
Worker console fix
*worker activity state introduced 
*late worker stop 
*BatchWriter refactor
2022-06-22 14:15:56 +02:00
Alex Kaminskii
00572c0f38 fix(frontend/dev): verbose function 2022-06-22 14:11:00 +02:00
Shekar Siri
db4844c4c5 change(ui) - funnel icon 2022-06-22 14:08:06 +02:00
sylenien
13043f6ee7 fix(tracker): rm unused 2022-06-22 14:05:35 +02:00
Alex Kaminskii
fe90b4cc26 feat(frontend/player): smooth cursor 2022-06-22 13:41:53 +02:00
Alex Kaminskii
9b66433348 feat(frontend): store dev options in localStorage 2022-06-22 13:27:39 +02:00
sylenien
794c7f72d4 fix(tracker): remove wworker logs(unused) 2022-06-22 12:49:46 +02:00
sylenien
22d7c4acd0 fix(tracker): change checks for state update 2022-06-22 12:41:07 +02:00
Alex K
e56fee3134
Merge pull request #524 from openreplay/hide-containers-rule
feat(ui): add option to mask entire HTML/SVG containers and their children tree
2022-06-22 12:26:15 +02:00
sylenien
0a66b23613 fix(tracker): move worker stop to the end of stop func 2022-06-22 12:24:07 +02:00
Shekar Siri
7910b9e872 feat(ui) - metrics list icons 2022-06-22 12:10:45 +02:00
rjshrjndrn
04d8148be6 chore(helm): change default clickhouse resource limit
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-22 11:06:49 +02:00
sylenien
308ef872f4 fix(tracker): code review 2022-06-22 09:54:26 +02:00
sylenien
845cf64e44 fix(tracker): rm console 2022-06-22 09:51:19 +02:00
sylenien
f1998eab3c fix(tracker): move activity state under worker start 2022-06-22 09:48:49 +02:00
Shekar Siri
05990478c5 feat(ui) - filters issue - operator dropdown refresh 2022-06-21 18:47:12 +02:00
Shekar Siri
0c45d43bb9 feat(ui) - filters issues live vs offline 2022-06-21 18:33:38 +02:00
Shekar Siri
2b5d85cb35 feat(ui) - dropdown alignments 2022-06-21 17:23:56 +02:00
Shekar Siri
44853973cf feat(ui) - integration icon and other checks 2022-06-21 17:23:56 +02:00
sylenien
692a0505e8 fix(tracker): typo fix 2022-06-21 16:59:37 +02:00
sylenien
5e7e498088 fix(tracker): fix state updating 2022-06-21 16:59:37 +02:00
sylenien
fedd89c119 fix(tracker): wworker build fix 2022-06-21 16:59:37 +02:00
sylenien
8750448841 fix(tracker): typo 2022-06-21 16:59:37 +02:00
sylenien
d6fd7b312a fix(tracker): rm unused 2022-06-21 16:59:37 +02:00
sylenien
8d919e49cc fix(tracker): add optional data in error 2022-06-21 16:59:37 +02:00
sylenien
869a25169f fix(tracker): potential fix for writer busy status 2022-06-21 16:59:37 +02:00
Alexander Zavorotynskiy
caf66b305a fix(backend): fixed bug when ender triggered on sessionEnd message 2022-06-21 16:19:22 +02:00
Rajesh Rajendran
26daf936c5
removing cache for worker build 2022-06-21 10:53:21 +00:00
Shekar Siri
28aa99a668 feat(ui) - metric to session player navigation flow with modal 2022-06-21 12:41:33 +02:00
Shekar Siri
ea7b37441b feat(ui) - metric type check for alert 2022-06-21 11:17:08 +02:00
Shekar Siri
c5cc24fc52 feat(ui) - errors details modal 2022-06-21 11:17:08 +02:00
Alexander Zavorotynskiy
b848b89536 feat(backend): removed not necessary message type 2022-06-21 11:11:33 +02:00
rjshrjndrn
d7a4005adc refactor(helm): format file.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-21 09:40:58 +02:00
rjshrjndrn
6862652744 chore(helm): variable for kafka retention.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-21 09:40:58 +02:00
ShiKhu
4a5093addf refactor(tracker/BatchWriter): explicit logic 2022-06-21 01:05:20 +02:00
Shekar Siri
67b83deddb feat(ui) - errors details modal 2022-06-20 19:01:54 +02:00
Taha Yassine Kraiem
7444c2d999 feat(api): changed funnel's dropDueToIssues 2022-06-20 18:45:03 +02:00
Taha Yassine Kraiem
86bbf49014 feat(api): return issue details if issue not found in funnel 2022-06-20 17:35:08 +02:00
Shekar Siri
e30e5c22ad feat(ui) - errors details modal 2022-06-20 17:25:54 +02:00
Shekar Siri
1bcb0dfc01 feat(ui) - funnels more steps 2022-06-20 17:25:54 +02:00
rjshrjndrn
fb164af465 chore(helm): Adding postgres string in ender
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-20 11:58:06 +02:00
rjshrjndrn
94fc4d693e fix(actions): image override
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-20 11:40:07 +02:00
rjshrjndrn
1a66daa3a2 chore(helm): details of cleaning.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-20 10:29:05 +02:00
rjshrjndrn
3ca389ff3c fix(helm): nginx lb algorithm
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-20 10:11:52 +02:00
rjshrjndrn
1898f18d6b fix(helm): efs clean cron path
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-20 09:44:39 +02:00
Alexander
623e241afb
feat(backend): moved recording sessionStart to db into http service and sessionEnd into ender service (#545)
Co-authored-by: Alexander Zavorotynskiy <alexander@openreplay.com>
2022-06-20 09:26:05 +02:00
Alexander Zavorotynskiy
3da78cfe62 feat(backend): added metadata insertion retrier (temp solution) 2022-06-17 17:33:52 +02:00
Taha Yassine Kraiem
592cbd5fd5 feat(api): errors search ignore Script error on query level 2022-06-17 17:26:22 +02:00
Taha Yassine Kraiem
e99776778f feat(api): errors search ignore Script error on query level 2022-06-17 16:54:39 +02:00
rjshrjndrn
f34c433a42 chore(backend): clean go mod
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-17 16:27:43 +02:00
Shekar Siri
cbc4c25a8e feat(ui) - sessions - widget - data reload 2022-06-17 15:48:14 +02:00
Taha Yassine Kraiem
98b71b13fe Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-17 15:41:49 +02:00
Taha Yassine Kraiem
6421d9b9b8 feat(api): elasticsearch fixed typo 2022-06-17 15:40:47 +02:00
Shekar Siri
93455cd746 feat(ui) - sessions - widget - pagination 2022-06-17 15:38:25 +02:00
Shekar Siri
10c064c99c feat(ui) - sessions - widget - pagination 2022-06-17 15:38:25 +02:00
Taha Yassine Kraiem
d007d7da5a feat(api): elasticsearch upgrade fix 2022-06-17 15:24:30 +02:00
Alexander Zavorotynskiy
951ffa0320 fix(backend/db): fixed bug (index row size exceeds maximum) by adding left() func in sql requests 2022-06-17 14:48:06 +02:00
Taha Yassine Kraiem
9e7e35769c Merge remote-tracking branch 'origin/api-v1.7.0' into dev 2022-06-17 12:57:03 +02:00
Taha Yassine Kraiem
c10140b8d1 feat(api): changed empty funnel response 2022-06-17 12:39:21 +02:00
Taha Yassine Kraiem
38b65537c7 feat(api): fixed Elasticsearch upgrade 2022-06-17 11:31:31 +02:00
rjshrjndrn
215d889782 ci(workers): build both ee and oss for deployment changes
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-17 11:01:52 +02:00
Taha Yassine Kraiem
1ee50b62ed feat(api): full dependencies upgrade 2022-06-17 10:53:43 +02:00
rjshrjndrn
a08ac6101a chore(helm): change nginx-ingress default lb to ewma
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-17 10:45:41 +02:00
Taha Yassine Kraiem
778db9af34 Merge remote-tracking branch 'origin/api-v1.7.0' into api-v1.7.0 2022-06-17 10:43:13 +02:00
Taha Yassine Kraiem
4d111d6f4a feat(db): migrate to v1.7.0: fixed cross-database references issue 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
7beb08f398 feat(db): migrate old funnels to new metric-funnels 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
891c7600a7 feat(api): custom metrics errors pagination
feat(api): custom metrics sessions pagination
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
9fb5e7c4d1 feat(api): fixed typo 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
f76c621350 feat(assist): support null&empty values for search
feat(assist): changed single-session search
feat(api): support null&empty values for live sessions search
feat(api): support key-mapping for different names
feat(api): support platform live-sessions search
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
6cc7372187 feat(api): support nested-key-sort for live sessions 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
4e22038137 feat(assist): changed pagination response
feat(assist): allow nested-key sort
feat(api): support new live sessions pagination response
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
2e5acdabc3 feat(assist): full autocomplete
feat(assist): solved endpoints conflicts
feat(api): live sessions full autocomplete
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
d1ef7ea1c7 feat(assist): full search
feat(api): live sessions full search
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
47fb100b4f feat(assist): fixed multiple values filter support for search 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
ab02495f63 feat(api): changed assist search payload 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
a59a8c0133 feat(assist): changed debug 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
bd9dbc9393 feat(assist): payload extraction debug 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
4fe3f87d46 feat(api): assist autocomplete 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
c0c1a86209 feat(assist): autocomplete 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
fbe37babbc feat(assist): sessions search handle nested objects 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
ccf951f8e4 feat(api): optimized live session check
feat(assist): optimized live session check
feat(assist): sort
feat(assist): pagination
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
0aa94bbc3c feat(assist): assist changed search payload 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
ef609aa196 feat(api): search live sessions 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
43184d5c43 feat(assist): assist refactored 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
c6a6a77e71 feat(assist): EE assist search 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
181195ffde feat(assist): assist refactored 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
58aea53101 feat(assist): assist upgrade uWebSockets
feat(assist): assist upgrade SocketIo
2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
03dbf42d11 feat(assist): FOSS assist search 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
7d4d0fadbd feat(api): requirements upgrade 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
5b1185b872 feat(api): metric-funnel changed response 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
c7c6cd2187 feat(api):metrics get sessions related to issue 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
1448cb45e9 feat(api): metrics table of errors 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
b4b3a6c26e feat(api): custom metrics fixed templates response 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
f296b27346 feat(api): optimised get issues for get session-details 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
531b112439 feat(api): fixed custom metrics timestamp issue 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
c68edbc705 feat(api): fixed login 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
7d4596c074 feat(api): get sessions details fix 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
03e0dbf0e4 feat(api): optimised get session details 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
46e7f5b83e feat(api): custom metrics config 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
bafae833d5 feat(api): limited long task DB 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
421a1f1104 feat(api): custom metrics config 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
405d83d4e0 feat(api): optimised weekly report 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
6c377bc4e5 feat(api): fixed login response 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
40d60f7769 feat(api): fixed login response 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
557d855ae5 feat(api): changed login response 2022-06-17 10:42:30 +02:00
Taha Yassine Kraiem
0dd7914375 feat(api): EE changed weekly report
feat(api): changed login response
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
63d2fce3b5 feat(api): fixed weekly report
feat(api): optimised weekly report
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
119ecd7743 feat(api): ignore weekly report if SMTP not configured 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
8aec595495 feat(api): changed connexion pool configuration
feat(alerts): changed connexion pool configuration
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
779c85dfda feat(api): changes 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
0fd7d1d80c feat(api): changes
feat(db): changes
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
5e85da6533 feat(api): changed pages_response_time_distribution response 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
26ce0c8e86 feat(api): changed crashes response 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
3f35b01a5e feat(api): changed speed_location response 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
597da9fc11 feat(api): changed speed_location response 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
fa7a57eb3f feat(api): changed slowest_domains response 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
23a98d83d7 feat(api): table of sessions widget 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
53fc845f9a feat(api): errors widget chart
feat(api): funnels widget chart
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
bf60c83f3b feat(api): errors widget 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
4912841a9e feat(api): funnel widget issues 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
8d49a588e4 feat(api): funnel widget 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
b5a646b233 feat(api): EE fixed edition 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
7d426ee79a feat(api): fixed notifications count query 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
06a52e505e feat(api): fixed edition
feat(api): fixed expiration date
feat(api): fixed change name
feat(api): fixed change role
feat(api): fixed has password
feat(api): refactored edit user
feat(api): refactored edit member
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
667fe3dd79 feat(db): removed user's appearance
feat(db): removed generated_password
feat(api): merged account&client
feat(api): cleaned account response
feat(api): removed user's appearance
feat(api): removed generated_password
feat(api): limits endpoint
feat(api): notifications/count endpoint
2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
d86ca3c7ec feat(db): EE CH new structure 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
e92f14dc17 feat(db): EE CH new structure 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
81503030e4 feat(db): EE CH new structure 2022-06-17 10:42:29 +02:00
Taha Yassine Kraiem
10f26ab45c feat(api): clean script 2022-06-17 10:42:27 +02:00
Taha Yassine Kraiem
5968b55934 feat(api): refactored user-auth 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
c2ea4fb4b6 feat(api): metrics changed web vitals description
feat(db): changed metric's monitoring essentials category to web vitals
2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
254202ba85 feat(api): fixed changed SearchSession payload schema 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
b2732eb9be feat(api): changed SearchSession payload schema 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
a3ba925cea feat(api): centralized 'order'
feat(api): transform 'order' casing
2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
20f7c0fb70 feat(DB): changed metrics category from Overview to Monitoring Essentials 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
9c9452c530 feat(api): upgraded python base image
feat(alerts): upgraded python base image
2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
c12cea6f6b feat(api): fixed CH client format 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
6c0aca2f8c feat(DB): changed partition expression 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
2ed54261b6 feat(api): fixed sourcemaps reader endpoint 2022-06-17 10:42:00 +02:00
Taha Yassine Kraiem
6bf5d1d65b feat(api): user trail limit changed 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
23584b8be8 feat(alerts): changed Dockerfile.alerts 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
f7002ab2a0 feat(api): vault support 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
2fba643b7c feat(api): changed search user trails by username 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
18f0d2fbca feat(api): search user trails by username
feat(db): index to search user trails by username
2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
9fcba8703e feat(api): EE updated authorizer 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
41d7d16d03 feat(api): changed Dockerfile 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
9100d27854 feat(api): changed root path 2022-06-17 10:41:59 +02:00
Taha Yassine Kraiem
507462180e feat(api): fixed return createdAt with the list of users 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
7f9bc99bcf feat(DB): traces/trails index
feat(api): get all possible traces/trails actions
feat(api): search traces/trails by actions
feat(api): search traces/trails by user
2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
e95c5b915d feat(api): return createdAt with the list of users 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
cf6320d4df feat(DB): traces/trails index
feat(api): get all traces/trails
2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
d9d2f08fb8 feat(DB): changed sessions_metadata sort expression 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
b0d3074ceb feat(api): changed Dockerfile 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
9c5d96e35c feat(api): changed Dockerfile 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
9af6fc004b feat(api): changed Dockerfile 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
1dcad02b9a feat(api): changed replay file URL 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
1859fb8a6c feat(api): EE updated dependencies 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
90143bcd31 feat(api): updated dependencies 2022-06-17 10:41:58 +02:00
Taha Yassine Kraiem
1224e6054e feat(api): fixed description optional value 2022-06-17 10:41:57 +02:00
Taha Yassine Kraiem
c715a6084e feat(api): fixed description default value 2022-06-17 10:41:57 +02:00
Taha Yassine Kraiem
1c671631e7 feat(api): changed Dockerfile 2022-06-17 10:41:57 +02:00
rjshrjndrn
ea103f9589 chore(vagrant): Adding development readme
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-17 10:41:55 +02:00
Rajesh Rajendran
32fdd80784 Vagrant for local contribution (#434)
* chore(vagrant): initial vagrantfile
* chore(vagrant): adding instructions after installation
* chore(vagrant): Adding vagrant user to docker group
* chore(vagrant): use local docker daemon for k3s
* chore(vagrant): fix comment
* chore(vagrant): adding hostname in /etc/hosts
* chore(vagrant): fix doc
* chore(vagrant): limiting cpu
* chore(frontend): initialize dev env
* chore(docker): adding dockerignore
* chore(dockerfile): using cache for fasten build
* chore(dockerignore): update
* chore(docker): build optimizations
* chore(build): all components build option
* chore(build): utilities build fix
* chore(scrpt): remove debug message
* chore(vagrant): provision using stable branch always

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-17 10:40:57 +02:00
Taha Yassine Kraiem
c72120ac64 feat(api): s3 helper detect environment
feat(api): support description for dashboards
2022-06-17 10:39:48 +02:00
Taha Yassine Kraiem
1e6c6fa1a7 feat(db): EE remove pages_count column 2022-06-17 10:39:48 +02:00
Taha Yassine Kraiem
d45fd1634d feat(api): EE fixed No of pages count widget 2022-06-17 10:39:48 +02:00
Taha Yassine Kraiem
9ddc0e5e4a feat(api): merge dev 2022-06-17 10:39:30 +02:00
Taha Yassine Kraiem
e322e9c3d0 feat(api): round time metrics 2022-06-17 10:33:41 +02:00
Alexander Zavorotynskiy
a153547575 feat(backend/db): send metadata directly to db (removed from batches) 2022-06-17 09:34:58 +02:00
Taha Yassine Kraiem
f9695198f2 feat(db): migrate to v1.7.0: fixed cross-database references issue 2022-06-16 19:18:52 +02:00
Taha Yassine Kraiem
621b4aae7c feat(db): migrate old funnels to new metric-funnels 2022-06-16 19:12:06 +02:00
Taha Yassine Kraiem
734320cfe5 feat(api): custom metrics errors pagination
feat(api): custom metrics sessions pagination
2022-06-16 17:49:57 +02:00
Shekar Siri
441f792679 feat(ui) - assist filters with pagination 2022-06-16 16:49:00 +02:00
Shekar Siri
133714a4cb feat(ui) - assist filters with pagination 2022-06-16 16:49:00 +02:00
Taha Yassine Kraiem
33a3890562 feat(api): fixed typo 2022-06-16 16:34:02 +02:00
rjshrjndrn
1a5c50cefa fix(helm): removing unnecessary ingress rules
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-16 15:08:43 +02:00
rjshrjndrn
54b414e199 chore(helm): adding pvc to utilities
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-16 14:17:33 +02:00
Taha Yassine Kraiem
a3aa176e67 feat(assist): support null&empty values for search
feat(assist): changed single-session search
feat(api): support null&empty values for live sessions search
feat(api): support key-mapping for different names
feat(api): support platform live-sessions search
2022-06-16 14:02:20 +02:00
Alexander Zavorotynskiy
d837c14be4 feat(backend): start using analytics topic for heuristics and trigger topic only for sessionEnd between sink and storage 2022-06-16 14:00:50 +02:00
Taha Yassine Kraiem
96bf84b567 feat(api): support nested-key-sort for live sessions 2022-06-16 12:27:51 +02:00
Taha Yassine Kraiem
fe6a50dc2c feat(assist): changed pagination response
feat(assist): allow nested-key sort
feat(api): support new live sessions pagination response
2022-06-16 11:53:49 +02:00
rjshrjndrn
75504409e7 chore(helm): Adding utilities chart
Will contain openreplay utilities. like
- efs cleaner
- postgres backup trigger etc
2022-06-16 11:14:18 +02:00
Taha Yassine Kraiem
c254aab413 feat(assist): full autocomplete
feat(assist): solved endpoints conflicts
feat(api): live sessions full autocomplete
2022-06-15 22:44:41 +02:00
Taha Yassine Kraiem
c6b719b9fa feat(assist): full search
feat(api): live sessions full search
2022-06-15 21:56:59 +02:00
Taha Yassine Kraiem
2dbdfade10 feat(assist): fixed multiple values filter support for search 2022-06-15 20:24:32 +02:00
Taha Yassine Kraiem
31a53edd5a feat(api): changed assist search payload 2022-06-15 19:25:50 +02:00
Shekar Siri
6ba773fe6d Merge branch 'dev-assist-filters' into dev 2022-06-15 19:08:01 +02:00
Shekar Siri
6144a34d75 Merge branch 'dev-funnels' into dev 2022-06-15 19:07:45 +02:00
Taha Yassine Kraiem
dd2c51e3b6 feat(assist): changed debug 2022-06-15 19:05:07 +02:00
Shekar Siri
9e87909167 feat(ui) - issues and errors widgets 2022-06-15 18:56:16 +02:00
Taha Yassine Kraiem
cf80c46cd9 feat(assist): payload extraction debug 2022-06-15 18:45:31 +02:00
Shekar Siri
c2ca867fdc change(ui) - checking for user login 2022-06-15 18:43:55 +02:00
Taha Yassine Kraiem
c53ecbef00 feat(api): assist autocomplete 2022-06-15 17:22:43 +02:00
Taha Yassine Kraiem
38be085622 feat(assist): autocomplete 2022-06-15 17:15:02 +02:00
Shekar Siri
1e78a851c6 feat(ui) - assist filters wip 2022-06-15 16:46:09 +02:00
Shekar Siri
aa669d6a86 feat(ui) - assist filters wip 2022-06-15 16:20:35 +02:00
Taha Yassine Kraiem
8510949d29 feat(assist): sessions search handle nested objects 2022-06-15 16:03:37 +02:00
Alexander Zavorotynskiy
5ea482d4c2 feat(backend/http): removed second unnecessary request body read 2022-06-15 15:50:55 +02:00
Shekar Siri
2fe2406d0c feat(ui) - assist filters wip 2022-06-15 15:29:29 +02:00
Taha Yassine Kraiem
d6070d1829 feat(api): optimized live session check
feat(assist): optimized live session check
feat(assist): sort
feat(assist): pagination
2022-06-15 15:05:41 +02:00
Shekar Siri
e5963fbeef feat(ui) - assist filters wip 2022-06-15 14:14:48 +02:00
Alexander Zavorotynskiy
56623f9635 feat(backend/db): added batch updates in web-stats methods 2022-06-15 13:20:37 +02:00
Alexander
3c6bd9613c
feat(backend): control batch size and number of sql requests in db service to more accurate management data inserts (#540)
Co-authored-by: Alexander Zavorotynskiy <alexander@openreplay.com>
2022-06-15 12:57:09 +02:00
Alexander
6b5d9d3799
feat(backend): added new trigger which sink should send to storage after session end received (#539)
Co-authored-by: Alexander Zavorotynskiy <alexander@openreplay.com>
2022-06-15 11:45:52 +02:00
Alexander
883a6f6909
Improved ender (#537)
* feat(backend/ender): using producer timestamp for session end detection

* feat(backend/ender): added timeControl module

Co-authored-by: Alexander Zavorotynskiy <alexander@openreplay.com>
2022-06-15 10:49:32 +02:00
Taha Yassine Kraiem
b85f2abfd5 feat(assist): assist changed search payload 2022-06-14 20:12:03 +02:00
Taha Yassine Kraiem
a2ec909ace feat(api): search live sessions 2022-06-14 20:09:36 +02:00
Taha Yassine Kraiem
971dbd40a4 feat(assist): assist refactored 2022-06-14 19:42:16 +02:00
Taha Yassine Kraiem
1462f90925 feat(assist): EE assist search 2022-06-14 19:37:04 +02:00
Taha Yassine Kraiem
ded2d980fe feat(assist): assist refactored 2022-06-14 18:01:52 +02:00
Taha Yassine Kraiem
d4d029c525 feat(assist): assist upgrade uWebSockets
feat(assist): assist upgrade SocketIo
2022-06-14 18:01:34 +02:00
Taha Yassine Kraiem
40836092fa feat(assist): FOSS assist search 2022-06-14 17:19:58 +02:00
Mehdi Osman
911736f772
Increased Redis max queue length 2022-06-14 16:21:15 +02:00
Taha Yassine Kraiem
b8eac83662 feat(api): requirements upgrade 2022-06-14 15:07:39 +02:00
Taha Yassine Kraiem
d478436d9b feat(api): metric-funnel changed response 2022-06-14 14:56:46 +02:00
Shekar Siri
af7f751b42 feat(ui) - issues and errors widgets 2022-06-14 14:36:08 +02:00
rjshrjndrn
ec66bc03c6 chore(helm): enable compression for nginx
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-14 13:16:54 +02:00
Shekar Siri
7874dcbe0b feat(ui) - issues and errors widgets 2022-06-14 12:47:43 +02:00
Alexander Zavorotynskiy
3059227bcd feat(backend): turn on kafka delivery reports 2022-06-14 10:19:33 +02:00
Taha Yassine Kraiem
13d71ce388 feat(api):metrics get sessions related to issue 2022-06-13 19:56:27 +02:00
Taha Yassine Kraiem
09711d4521 feat(api): metrics table of errors 2022-06-13 19:26:00 +02:00
Taha Yassine Kraiem
50dce0ee9f feat(api): custom metrics fixed templates response 2022-06-13 19:20:16 +02:00
Taha Yassine Kraiem
2a12ed7337 feat(api): optimised get issues for get session-details 2022-06-13 18:24:03 +02:00
Shekar Siri
06855c41f4 feat(ui) - issues - widget 2022-06-13 17:39:27 +02:00
Shekar Siri
bea34112c9 feat(ui) - funnels - issue details 2022-06-13 17:35:31 +02:00
Shekar Siri
44d735d0a5 Merge branch 'funnels' into deb-funnels 2022-06-13 17:21:46 +02:00
Shekar Siri
88adec9e84 feat(ui) - funnels - issue details 2022-06-13 17:20:09 +02:00
Taha Yassine Kraiem
c856b2168d feat(api): fixed custom metrics timestamp issue 2022-06-13 16:07:56 +02:00
Taha Yassine Kraiem
85c27ff0f5 feat(api): fixed login 2022-06-13 15:59:54 +02:00
Taha Yassine Kraiem
d4c7fdcc5f feat(api): get sessions details fix 2022-06-13 15:24:21 +02:00
Shekar Siri
b26f2e87bf feat(ui) - funnels - issue details 2022-06-13 14:04:16 +02:00
Taha Yassine Kraiem
2b85ad3dfc feat(api): optimised get session details 2022-06-13 13:19:24 +02:00
Shekar Siri
4e2bcf26a4 feat(ui) - funnels - issue details 2022-06-13 12:32:13 +02:00
Shekar Siri
936d1f6f6e feat(ui) - funnels - details 2022-06-13 11:35:23 +02:00
Taha Yassine Kraiem
974f78b84a feat(api): custom metrics config 2022-06-10 17:51:47 +02:00
Taha Yassine Kraiem
36e5ba6389 feat(api): limited long task DB 2022-06-10 17:36:22 +02:00
Taha Yassine Kraiem
41b96321fe feat(api): custom metrics config 2022-06-10 17:19:51 +02:00
sylenien
fee99d3bf1 fix(ui): bugfixes 2022-06-10 17:11:14 +02:00
sylenien
43f52a9dcd fix(ui): fix couple ui bugs 2022-06-10 17:11:14 +02:00
dlrm
0c4b6ab6f0 fix(ui): fix styles 2022-06-10 17:11:14 +02:00
dlrm
c90a8d558a fix(ui): env? 2022-06-10 17:11:14 +02:00
dlrm
2bc44d038e fix(ui): fix env sample 2022-06-10 17:11:14 +02:00
dlrm
f745b9ba51 fix(ui): fix env sample 2022-06-10 17:11:14 +02:00
dlrm
f7eb848706 fix(ui): fixes after webpack update 2022-06-10 17:11:14 +02:00
dlrm
f08d8ca07e fix(ui): webpack 2022-06-10 17:11:14 +02:00
sylenien
aca4ef697e fix(ui): fix icon positioning on a timeline 2022-06-10 17:11:14 +02:00
sylenien
55f58487f5 fix(ui): fix performance tab graph mapper 2022-06-10 17:11:14 +02:00
sylenien
05c8bf4d59 fix(ui): red color changes, menu controls, performance crash 2022-06-10 17:11:14 +02:00
sylenien
997a5421ae fix(ui): small design fixes 2022-06-10 17:11:14 +02:00
sylenien
8a2d777d8c fix(ui): small fixes to share popup, archive inspector 2022-06-10 17:11:14 +02:00
sylenien
9caaabcacc fix(ui): move issues button to the subheader 2022-06-10 17:11:14 +02:00
sylenien
43a1991300 fix(ui): ui fixes 2022-06-10 17:11:14 +02:00
sylenien
c60b060cbe fix(ui): unblock tabs when in inspector mode, turn off inspector on tab change 2022-06-10 17:11:14 +02:00
sylenien
13dff716ea fix(ui): fix ui bugs 2022-06-10 17:11:14 +02:00
sylenien
366314193e fix(ui): design review fixes 2022-06-10 17:11:14 +02:00
sylenien
6e24da549a fix(ui): live session fixes 2022-06-10 17:11:14 +02:00
sylenien
02c87d237d feat(ui): change player control tabs designs 2022-06-10 17:11:14 +02:00
sylenien
b1d903f7f6 fix(ui): design fixes 2022-06-10 17:11:14 +02:00
sylenien
83600ee04d fix(ui): minor changes 2022-06-10 17:11:14 +02:00
sylenien
dbae4fe353 feat(ui): player controls redesign 2022-06-10 17:11:14 +02:00
sylenien
35d258aa8c fix(ui): design review fixes 2022-06-10 17:11:14 +02:00
sylenien
042571193a fix(ui): minor bugs 2022-06-10 17:11:14 +02:00
sylenien
3031569c07 fix(ui): ui fixes after design review 2022-06-10 17:11:14 +02:00
sylenien
9d06a95c7a fix(ui): fix active sessions 2022-06-10 17:11:14 +02:00
sylenien
ce5affddd6 fix(ui): fix styles in player header 2022-06-10 17:11:14 +02:00
sylenien
3444b73ed0 fix(ui): show events serach by default 2022-06-10 17:11:14 +02:00
sylenien
2109808d61 fix(ui): fix tooltip for subheader 2022-06-10 17:11:14 +02:00
sylenien
197694be73 fix(ui): rm test code 2022-06-10 17:11:14 +02:00
sylenien
ff73c70bfd fix(ui): fix warnings for few components 2022-06-10 17:11:14 +02:00
sylenien
1e51e3bce8 feat(ui): change eventgroup sidebar 2022-06-10 17:11:14 +02:00
sylenien
5e296703b0 fix(ui): fix typo 2022-06-10 17:11:14 +02:00
sylenien
05ecce9c74 feat(ui): add urlref bad to subheader 2022-06-10 17:11:14 +02:00
sylenien
5f5f47b06b fix(ui): rm unused code 2022-06-10 17:11:14 +02:00
sylenien
e3099bf93d fix(ui): return subheader 2022-06-10 17:11:14 +02:00
sylenien
0ab16ce91c fix(ui): fix for cicd 2022-06-10 17:11:14 +02:00
sylenien
a7d032bb29 fix(ui): rename file 2022-06-10 17:11:14 +02:00
sylenien
6b34630fa1 fix(ui): minor bugfix 2022-06-10 17:11:14 +02:00
sylenien
c584b0f653 feat(ui): change events tab design, move action buttons to subheader 2022-06-10 17:11:14 +02:00
sylenien
aff6f54397 fix(ui): fix sessionlist modal 2022-06-10 17:11:14 +02:00
sylenien
3aac6cf130 feat(ui): redesign player header; move user data to header 2022-06-10 17:11:14 +02:00
Taha Yassine Kraiem
dc02594da8 feat(api): optimised weekly report 2022-06-10 16:31:08 +02:00
Taha Yassine Kraiem
e796e6c795 feat(api): fixed login response 2022-06-10 15:49:24 +02:00
Taha Yassine Kraiem
8d4d61103a feat(api): fixed login response 2022-06-10 15:44:05 +02:00
Taha Yassine Kraiem
3217a55bca feat(api): changed login response 2022-06-10 15:29:54 +02:00
Taha Yassine Kraiem
0886e3856a feat(api): EE changed weekly report
feat(api): changed login response
2022-06-10 12:33:36 +02:00
Taha Yassine Kraiem
5592e13d9b feat(api): fixed weekly report
feat(api): optimised weekly report
2022-06-10 12:31:29 +02:00
Taha Yassine Kraiem
4305e03745 feat(api): ignore weekly report if SMTP not configured 2022-06-10 11:53:47 +02:00
Taha Yassine Kraiem
e1b233bac8 feat(api): changed connexion pool configuration
feat(alerts): changed connexion pool configuration
2022-06-10 11:35:25 +02:00
sylenien
684f1598bc feat(tracker): add option to hide dom nodes 2022-06-10 09:51:40 +02:00
Alexander Zavorotynskiy
ea658316a2 fix(backend): fixed panic in kafka consumer 2022-06-10 09:45:50 +02:00
Alexander Zavorotynskiy
b646ba2a9e fix(backend): fixed panic in db service 2022-06-10 09:31:54 +02:00
Taha Yassine Kraiem
b16b3e3b87 feat(api): changes 2022-06-09 17:37:49 +02:00
Taha Yassine Kraiem
656e13f6e5 feat(api): changes
feat(db): changes
2022-06-09 17:23:17 +02:00
Taha Yassine Kraiem
6e5bdae7da feat(api): changed pages_response_time_distribution response 2022-06-09 14:12:21 +02:00
Taha Yassine Kraiem
c81ce9bf7d feat(api): changed crashes response 2022-06-09 14:09:13 +02:00
Taha Yassine Kraiem
6e9e5dceb7 feat(api): changed speed_location response 2022-06-09 13:54:25 +02:00
Taha Yassine Kraiem
89b3d84230 feat(api): changed speed_location response 2022-06-09 13:53:55 +02:00
Taha Yassine Kraiem
9411f0f576 feat(api): changed slowest_domains response 2022-06-09 13:42:52 +02:00
dlrm
3b8a2c19ef fix(tracker): code style 2022-06-09 13:36:28 +02:00
dlrm
c913e4e7f6 fix(tracker): code rvw 2022-06-09 13:36:28 +02:00
dlrm
9158fa60c5 fix(tracker): fix tracker date recording, added new obscure dates opt
fix(tracker): rm consolelog

fix(tracker): change compile import

fix(tracker): fix node v and import
2022-06-09 13:36:28 +02:00
Taha Yassine Kraiem
7b1e854c53 feat(api): table of sessions widget 2022-06-09 13:13:05 +02:00
Taha Yassine Kraiem
adb8e2c404 feat(api): errors widget chart
feat(api): funnels widget chart
2022-06-08 19:03:06 +02:00
Taha Yassine Kraiem
6816dedaff feat(api): errors widget 2022-06-08 17:21:13 +02:00
Shekar Siri
a461ad0938 change(ui) - sessions daterange 2022-06-08 16:55:29 +02:00
Shekar Siri
4188b7894d change(ui) - tracking code changes 2022-06-08 16:29:00 +02:00
Shekar Siri
e652ee97ba pulled webpack changes and resolved conflicts 2022-06-08 16:16:41 +02:00
Shekar Siri
f235da44ab pulled webpack changes and resolved conflicts 2022-06-08 16:04:52 +02:00
Shekar Siri
767376a8db change(ui) - notifications count and list with mobx 2022-06-08 15:50:29 +02:00
Shekar Siri
d8911e93c1 change(ui) - notifications count and list 2022-06-08 15:50:29 +02:00
Shekar Siri
8273fc08bc change(ui) - login align 2022-06-08 15:50:29 +02:00
Alexander
e749ed1823
Merge pull request #531 from openreplay/assets_fix
Assets fix
2022-06-08 15:08:25 +02:00
Alexander Zavorotynskiy
2dccb2142b fix(backend/assets): return back cache checks in s3 2022-06-08 15:05:02 +02:00
Alexander Zavorotynskiy
404f6204e1 fix(backend/assets): copy ts and index in assets convert method 2022-06-08 14:44:16 +02:00
Alexander Zavorotynskiy
248d3b2c3d fix(backend/assets): changed comment 2022-06-08 13:17:37 +02:00
rjshrjndrn
9388e03e8c fix(ingress): assets ingress values
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-08 12:47:24 +02:00
Alexander Zavorotynskiy
c4081ce78a feat(backend/assets): disabled cache checks 2022-06-08 12:38:38 +02:00
Taha Yassine Kraiem
b2a778a0d7 feat(api): funnel widget issues 2022-06-07 20:10:40 +02:00
Taha Yassine Kraiem
1445c72737 feat(api): funnel widget 2022-06-07 19:17:55 +02:00
Taha Yassine Kraiem
734d1333a9 feat(api): EE fixed edition 2022-06-07 18:34:52 +02:00
Taha Yassine Kraiem
932c18f65a feat(api): fixed notifications count query 2022-06-07 18:18:22 +02:00
Taha Yassine Kraiem
3a70c8bef6 feat(api): fixed edition
feat(api): fixed expiration date
feat(api): fixed change name
feat(api): fixed change role
feat(api): fixed has password
feat(api): refactored edit user
feat(api): refactored edit member
2022-06-07 18:12:08 +02:00
rjshrjndrn
a996fac4d3 fix(ingress): assets path
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-07 17:31:51 +02:00
rjshrjndrn
8ce66d0ffc fix(build): frontend build command
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-07 16:42:15 +02:00
rjshrjndrn
4986708006 build(frontend): changed env file
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-07 15:46:50 +02:00
Shekar Siri
b1ce794c06 change(ui) - Tenant Key checking for ee edition 2022-06-07 15:41:36 +02:00
Shekar Siri
d8dcfe4b5e change(ui) - removed client and updated account 2022-06-07 15:41:36 +02:00
Shekar Siri
7a3b13ff8a change(ui) - Tenant Key checking for ee edition 2022-06-07 15:41:36 +02:00
Shekar Siri
c3d4470bb1 change(ui) - removed appearnace 2022-06-07 15:41:36 +02:00
Alexander Zavorotynskiy
0b0798b0ef feat(backend/assets): added metric (total_assets) 2022-06-07 14:14:18 +02:00
Alexander Zavorotynskiy
9292d315c4 feat(backend/ender): removed debug log 2022-06-07 13:48:10 +02:00
Alexander Zavorotynskiy
7678e9d056 fix(backend/db): fixed loss of sessions 2022-06-07 13:44:20 +02:00
Alexander Zavorotynskiy
4f8c4358f8 fix(backend/storage): fixed panic in storage service 2022-06-07 13:30:48 +02:00
Shekar Siri
329ae62881 change(ui) - input class 2022-06-07 12:08:15 +02:00
Shekar Siri
65331ca016 change(ui) - code snippet 2022-06-07 12:04:43 +02:00
Shekar Siri
cb5809608a change(ui) - code snippet 2022-06-07 11:59:42 +02:00
Alexander Zavorotynskiy
78cf538b6b feat(backend): added metrics to storage and sink services 2022-06-07 10:12:42 +02:00
Taha Yassine Kraiem
cbe78cc58e feat(db): removed user's appearance
feat(db): removed generated_password
feat(api): merged account&client
feat(api): cleaned account response
feat(api): removed user's appearance
feat(api): removed generated_password
feat(api): limits endpoint
feat(api): notifications/count endpoint
2022-06-06 19:33:26 +02:00
Alexander Zavorotynskiy
a6db2cb602 feat(backend): added metrics to http service 2022-06-06 16:46:14 +02:00
Alexander Zavorotynskiy
c963b74cbf feat(backend): cleaned up in internal dir 2022-06-06 14:13:24 +02:00
Taha Yassine Kraiem
a6c75d3cdd Merge remote-tracking branch 'origin/dev' into api-v1.6.1
# Conflicts:
#	api/Dockerfile
#	api/development.md
#	backend/Dockerfile.bundle
#	backend/build.sh
#	backend/development.md
#	backend/internal/assets/jsexception.go
#	backend/internal/handlers/ios/performanceAggregator.go
#	backend/pkg/intervals/intervals.go
#	backend/pkg/log/queue.go
#	backend/pkg/messages/filters.go
#	backend/pkg/messages/legacy-message-transform.go
#	backend/pkg/messages/messages.go
#	backend/pkg/messages/read-message.go
#	backend/services/db/heuristics/anr.go
#	backend/services/db/heuristics/clickrage.go
#	backend/services/db/heuristics/heuristics.go
#	backend/services/db/heuristics/readyMessageStore.go
#	backend/services/db/heuristics/session.go
#	backend/services/db/stats.go
#	backend/services/ender/builder/builderMap.go
#	backend/services/ender/builder/clikRageDetector.go
#	backend/services/ender/builder/cpuIssueFinder.go
#	backend/services/ender/builder/deadClickDetector.go
#	backend/services/ender/builder/domDropDetector.go
#	backend/services/ender/builder/inputEventBuilder.go
#	backend/services/ender/builder/memoryIssueFinder.go
#	backend/services/ender/builder/pageEventBuilder.go
#	backend/services/ender/builder/performanceTrackAggrBuilder.go
#	backend/services/http/assets.go
#	backend/services/http/handlers-depricated.go
#	backend/services/http/ios-device.go
#	backend/services/integrations/clientManager/manager.go
#	backend/services/storage/gzip.go
#	backend/services/storage/main.go
#	ee/api/clean.sh
#	scripts/helmcharts/local_deploy.sh
#	scripts/helmcharts/vars.yaml
2022-06-03 17:06:25 +01:00
Taha Yassine Kraiem
31a577b6cc feat(db): EE CH new structure 2022-06-03 16:56:37 +01:00
Shekar Siri
a7bfbc8ff7 change(ui) - config changes 2022-06-03 17:18:17 +02:00
Shekar Siri
2ed5cac986
Webpack upgrade and dependency cleanup (#523)
* change(ui) - webpack update
* change(ui) - api optimize and other fixes
2022-06-03 16:47:38 +02:00
rjshrjndrn
f5e013329f chore(action): removing unnecessary file 2022-06-03 16:32:06 +02:00
Alexander Zavorotynskiy
d358747caf fix(backend): several fixes in backend services 2022-06-03 16:01:14 +02:00
Alex Kaminskii
d0e651bc29 fix(tracker): uncomment init scroll tracking 2022-06-03 14:19:39 +02:00
Alex Kaminskii
e57d90e5a1 fix(tracker): use node guards instead of instanceof in some cases; import type App 2022-06-03 14:17:53 +02:00
Alex Kaminskii
1495f3bc5d fix(backend/ee/kafka): Partition-wise back-commit 2022-06-03 13:52:31 +02:00
rjshrjndrn
f626636ed7 chore(helm): enable cors for ingest
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-06-03 13:04:11 +02:00
rjshrjndrn
06eeabe494 chore(actions): enable build from branch 2022-06-03 12:48:24 +02:00
Alexander Zavorotynskiy
d68ac74731 feat(backend/http): added OPTIONS method for all paths 2022-06-03 11:13:56 +02:00
Alexander Zavorotynskiy
d4e5fce12a feat(backend/http): added prefix hack 2022-06-03 10:52:12 +02:00
Alex Kaminskii
7395688831 fix(backend/http): check if order of declaring gets influence 2022-06-02 19:04:48 +02:00
Eric Chan
c2695ef31f allow use of localStorage and sessionStorage to be overridden 2022-06-02 17:49:05 +02:00
Alexander Zavorotynskiy
1a8c076b41 fix(backend/http): added preflight headers to root 2022-06-02 17:39:38 +02:00
Taha Yassine Kraiem
e7e0296b6b feat(db): EE CH new structure 2022-06-02 12:37:52 +01:00
Alexander Zavorotynskiy
2fb57962b8 feat(backend/sink): added last session ts in sink logs 2022-06-02 10:50:14 +02:00
Alexander Zavorotynskiy
485865f704 fix(backend/storage): fixed ts of last processed session in logs 2022-06-02 10:27:32 +02:00
Alexander Zavorotynskiy
2cadf12f88 feat(backend/storage): added counter and last session timestamp for storage service 2022-06-02 10:13:18 +02:00
Taha Yassine Kraiem
caaf7793e3 feat(db): EE CH new structure 2022-06-01 19:51:42 +01:00
rjshrjndrn
f330d5031f chore(helm): adding grafana ingress
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-31 19:21:45 +02:00
Taha Yassine Kraiem
95088518aa feat(api): clean script 2022-05-31 13:46:13 +01:00
Alexander Zavorotynskiy
3a4d5f6796 feat(backend/sink): added additional log on producer write operation 2022-05-31 14:43:56 +02:00
Taha Yassine Kraiem
b1aae16f60 feat(api): refactored user-auth 2022-05-31 10:14:55 +01:00
Alexander Zavorotynskiy
6e92ba2e79 feat(backend/ender): added additional log for ender service 2022-05-31 10:40:44 +02:00
Alexander Zavorotynskiy
df18e7dd7d feat(backend/storage): additional log and memory improvements in storage service 2022-05-31 10:02:31 +02:00
Alexander Zavorotynskiy
0b7bb2339d fix(backend/datasaver): changed postgres on clickhouse and added missed imports 2022-05-30 17:41:45 +02:00
rjshrjndrn
440efd1b5d chore(helm): increase health check timeout
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-30 17:30:35 +02:00
rjshrjndrn
6aaa0b5fb8 chore(helm): chalice health check
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-30 17:20:28 +02:00
Alexander Zavorotynskiy
d871558390 fix(backend/storage): fixed bug with large session files 2022-05-30 16:59:41 +02:00
Alexander Zavorotynskiy
24fdb5e18c fix(backend/http): fixed bug with aws health checks 2022-05-30 16:39:05 +02:00
ShiKhu
0f434a21d4 fix(tracker): 3.5.12: resolve Promise returning on start() with success:false instead of rejecting 2022-05-27 21:25:21 +02:00
ShiKhu
3555864580 fix(backend-db): log session-not-found only once 2022-05-27 12:55:15 +02:00
ShiKhu
edddf87e5f fix(frontend): resources status fix 2022-05-27 12:38:05 +02:00
Alexander Zavorotynskiy
0fe1b0c3a8 fix(backend/storage): fixed panic in storage service 2022-05-27 10:22:19 +02:00
Rajesh Rajendran
3a2b54a446
Fixes related to clickhouse and service Port for ingress (#510)
* chore(helm): variablizing clickhouse shards/replica

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(clickhouse): adding new template for clickhouse cluster

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(helm): enable passwordless clickhouse

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(install): check clickhouse is up prior initialization

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(helm): port value for ingress
2022-05-25 16:53:24 +00:00
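The "check clickhouse is up prior initialization" step mentioned in the pull request above is essentially a wait loop before running the schema scripts. A minimal sketch of that idea, assuming a ClickHouse pod named clickhouse-0 in the db namespace (both names are placeholders, not taken from the chart):

# Wait until ClickHouse answers a trivial query, then run initialization.
until kubectl exec -n db clickhouse-0 -- clickhouse-client --query "SELECT 1" >/dev/null 2>&1; do
  echo "waiting for clickhouse to come up..."
  sleep 5
done
echo "clickhouse is up; running schema initialization"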
Rajesh Rajendran
55a0d3a0e0
chore(helm): enable serviceMonitor only if monitoring stack installed. (#509) 2022-05-25 16:11:09 +00:00
Rajesh Rajendran
2752118e94
fix(helm): clickhouse change port type to integer (#508)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-25 16:00:13 +00:00
Rajesh Rajendran
c795e0480d
fix(helm): service port installation issue (#507)
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-25 15:58:48 +00:00
Alex Kaminskii
d7dc6e0860 fix(player): apply scrolls after styles 2022-05-25 15:24:10 +02:00
Rajesh Rajendran
2a870d6f74
chore(helm): enabling monitoring for services (#503) 2022-05-24 17:49:24 +00:00
Alexander Zavorotynskiy
a32ac65f35 feat(backend): additional logs in messageHandler 2022-05-24 16:27:35 +02:00
Alexander Zavorotynskiy
ca78bca3d1 chore(helmchart): added missed part of yaml file to sink helm chart 2022-05-24 13:39:53 +02:00
Alexander Zavorotynskiy
31c852df2b feat(backend/sink): added error log for consumer.Commit() method 2022-05-24 13:30:25 +02:00
Alexander Zavorotynskiy
204c6f589b feat(backend/sink): small changes 2022-05-24 13:24:00 +02:00
Alexander Zavorotynskiy
8647beb538 chore(helmchart): added ASSETS_ORIGIN to sink helm chart 2022-05-24 13:21:38 +02:00
Alexander
c6f54f18aa
Merge pull request #502 from openreplay/message_timestamp_changes
Message timestamp changes
2022-05-24 13:02:16 +02:00
Alexander Zavorotynskiy
c941cb872a feat(backend/messages): added timestamp for SessionStart and moved RawErrorEvent to db datasaver 2022-05-24 10:33:16 +02:00
Alexander Zavorotynskiy
d685ad4cb3 feat(backend/ender): implemented metrics module and added to ender service 2022-05-23 17:48:24 +02:00
Alexander Zavorotynskiy
d29416fd48 fix(backend): fixed bug with group name in heuristics service 2022-05-23 17:42:28 +02:00
sylenien
07072f74b0 fix(ui): fix text overflow 2022-05-23 11:05:03 +02:00
sylenien
a06fb42e12 fix(ui): fix bugs with metric updating, metric selection hover etc 2022-05-23 11:05:03 +02:00
sylenien
40ab7d1e41 fix(ui): minor fixes for session settings 2022-05-23 11:05:03 +02:00
sylenien
d4fa960fdf fix(ui): make dashboardeditModal closable with esc 2022-05-23 11:05:03 +02:00
sylenien
6a801a2026 fix(ui): make menuitem configurable 2022-05-23 11:05:03 +02:00
sylenien
af45af8bd0 fix(ui): design review - dashboard metric selection 2022-05-23 11:05:03 +02:00
sylenien
a489a8b77e fix(ui): design review - saved search 2022-05-23 11:05:03 +02:00
sylenien
144f596144 fix(ui): rm consolelog 2022-05-23 11:05:03 +02:00
sylenien
e47797ee3e fix(ui): minor ui fixes after review 2022-05-23 11:05:03 +02:00
sylenien
020b993280 fix(ui): fix description input focus 2022-05-23 11:05:03 +02:00
sylenien
4efe7a7843 feat(ui): add icon to metric creation box 2022-05-23 11:05:03 +02:00
Alex Kaminskii
30d6f2489c feat (tracker-assist): 3.5.11: RemoteControl: better scroll element detection; maintain react tight state input value 2022-05-20 22:38:13 +02:00
Alex Kaminskii
62e163fb40 fix(player-assist): ignore tab press during remote control 2022-05-20 22:26:22 +02:00
Alex Kaminskii
d30b663195 fix(player): use append() instead of add(); update lastMessageTime inside distributeMessage 2022-05-20 19:05:32 +02:00
Taha Yassine Kraiem
b5540998d9 feat(api): metrics changed web vitals description
feat(db): changed metric's monitoring essentials category to web vitals
2022-05-20 11:20:25 +02:00
rjshrjndrn
40e0296c8a docs(machine setup): for contribution
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-19 19:05:53 +02:00
rjshrjndrn
9526ea68aa chore(helm): clickhouse use kafka zookeeper
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-19 18:27:00 +02:00
Alex Kaminskii
18a09cf66b fix(frontend/player): codefix 2022-05-19 17:52:49 +02:00
Alex Kaminskii
cecd57fc50 fix(frontend): maintain string mobsUrl for the smooth version transition 2022-05-19 17:29:15 +02:00
Rajesh Rajendran
97094107fe
GH actions for ee (#488)
* chore(actions): changing installation method

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(actions): inject ee license key and image tag

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(actions): image tag overload

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-19 15:04:01 +00:00
Rajesh Rajendran
2e332f3447
Openreplay install, without kubernetes and related tools (#487)
* chore(init script): option to skip k8s/tools installation

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(install): init script gnu sed detection

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-19 13:21:37 +00:00
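The "init script gnu sed detection" change matters because macOS ships BSD sed, whose -i flag behaves differently from GNU sed. A rough sketch of how such a detection can look, assuming gnu-sed is installed as gsed on macOS; the variable name and the vars.yaml edit are illustrative, not the script's own:

# GNU sed answers --version; BSD sed does not.
if sed --version 2>/dev/null | grep -q "GNU sed"; then
  SED_CMD="sed"
else
  # On macOS, `brew install gnu-sed` provides gsed
  SED_CMD="gsed"
fi
# Example in-place edit using whichever sed was found
$SED_CMD -i 's/changeMePassword/aRealPassword/' vars.yaml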
sylenien
d08280b709 fix(ui): fix text size 2022-05-19 15:11:17 +02:00
sylenien
c82efbeb6b fix(ui): bug fixes for dashboard 2022-05-19 15:11:17 +02:00
sylenien
580641efe8 fix(ui): fix css files 2022-05-19 15:11:17 +02:00
sylenien
bb4aafa1df fix(ui): code rvw 2022-05-19 15:11:17 +02:00
sylenien
d9a01b3380 feat(ui): move create metric button to the grid 2022-05-19 15:11:17 +02:00
sylenien
69002865d6 fix(ui): remove unnecessary code 2022-05-19 15:11:17 +02:00
sylenien
cde2a6e2d5 fix(ui): fix metric category max height calculation 2022-05-19 15:11:17 +02:00
sylenien
eaf162c5f8 fix(ui): minor metric hover styles fixes 2022-05-19 15:11:17 +02:00
sylenien
e8f7e2e9be feat(ui): make edit metric title hoverable and clickable, create plain text button for future usage 2022-05-19 15:11:17 +02:00
Taha Yassine Kraiem
6df7bbe7d1 feat(api): fixed changed SearchSession payload schema 2022-05-18 20:02:09 +02:00
Taha Yassine Kraiem
4a55d93f52 feat(api): changed SearchSession payload schema 2022-05-18 19:43:18 +02:00
Taha Yassine Kraiem
2544a3e166 feat(api): centralized 'order'
feat(api): transform 'order' casing
2022-05-18 19:08:08 +02:00
ShiKhu
babe654329 Merge branch 'assist-fixes' into dev 2022-05-18 17:55:25 +02:00
ShiKhu
84b99616bd chore(tracker-assist): fix package number string 2022-05-18 17:43:31 +02:00
ShiKhu
8b0ad960e9 Merge branch 'assist-fixes' of github.com:openreplay/openreplay into assist-fixes 2022-05-18 17:29:26 +02:00
ShiKhu
613bed393a fix(player): take into account first message time 2022-05-18 17:29:17 +02:00
Shekar Siri
dce918972f change(ui) - enable annotation on call or remote 2022-05-18 17:27:11 +02:00
ShiKhu
9294748352 fix(frontend-assist): toggleAnnotation encapsulation + fix inverse booleans 2022-05-18 17:17:11 +02:00
ShiKhu
f8bbc16208 fix(frontend-player): apply set_input_value on blur if focused (for the case of remote control) 2022-05-18 16:49:36 +02:00
ShiKhu
b283b89bd2 feat(tracker-assist): annotation available on RemoteControl as well 2022-05-18 16:01:18 +02:00
Shekar Siri
437341257c change(ui) - enable annotation without call 2022-05-18 15:49:58 +02:00
Alex Kaminskii
1f80cb4e64 Merge branch 'small-player-refactoring' into dev 2022-05-18 15:25:34 +02:00
Alex Kaminskii
bd6dba4781 fix(tracker-assisst): ConfirmWindow: override default button style & separate defaults 2022-05-18 14:50:56 +02:00
rjshrjndrn
bda652ccab fix(helm): service name
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-18 14:05:08 +02:00
Alex Kaminskii
4c8751944c style(tracker-*): do not store lock files under the npm package dirs 2022-05-18 13:57:38 +02:00
Alexander Zavorotynskiy
a9071b68f2 chore(bash): added heuristics service to local_build.sh 2022-05-18 13:30:21 +02:00
Alexander Zavorotynskiy
8d0d05c2cf fix(backend/heuristics): fixed panic in performanceAggr message encoding 2022-05-18 13:28:00 +02:00
Shekar Siri
ab2a800b7c merged vault (from main) and resolved conflicts 2022-05-18 12:52:26 +02:00
Shekar Siri
9ea1992b34 merged vault (from main) and resolved conflicts 2022-05-18 12:51:26 +02:00
rjshrjndrn
336046a443 chore(helm): common naming convention 2022-05-18 12:39:13 +02:00
Rajesh Rajendran
5041bcb177
GH action with new format (#479)
* chore(actions): update GH Actions to new deployment format

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(actions): yaml indentation

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(actions): image override

helm doesn't support multipart yaml files.

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(action): enable docker image cache

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(actions): chalice deployment

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(actions): check previous image prior deploying

Because we're using an umbrella chart and not storing the image tags deployed
from actions anywhere, a new deployment would reset all previously deployed
image tags. To avoid that, we have to fetch the existing image tags and feed
them into the current deployment (see the sketch after this pull request entry).

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* fix(actions): static path the build input

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>

* chore(actions): adding dev branch to chalice deployment

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-17 20:07:02 +00:00
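A condensed sketch of that tag-preservation step (the full version appears verbatim in the workflow diffs at the bottom of this compare); the app namespace, the /tmp paths and the /foss/ registry filter follow those workflows, while the read/printf loop is a simplified rewrite rather than the exact script:

# Collect the image tags currently running in the app namespace
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" \
  | tr -s '[[:space:]]' '\n' | sort -u | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
# Each line looks like "service:tag"; turn it into a per-service values override
while IFS=: read -r service tag; do
  printf '%s:\n  image:\n    tag: %s\n' "$service" "$tag" >> /tmp/image_override.yaml
done < /tmp/image_tag.txt
# Feed the preserved tags back in so the umbrella chart does not reset them
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml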
sylenien
631f427f67 fix(ui): fix typo 2022-05-17 18:20:34 +02:00
sylenien
fcd79a6fb7 fix(ui): fix weird scrolling 2022-05-17 18:20:34 +02:00
sylenien
ff02248900 fix(ui): remove additional divider line, fix zindex for menu 2022-05-17 18:20:34 +02:00
sylenien
8e58e68607 fix(ui): fix descr position, fix card click, rm unneeded code 2022-05-17 17:57:03 +02:00
sylenien
07d2c0427d feat(ui): add hovers to metric widgets for dashboard and template comps 2022-05-17 17:57:03 +02:00
sylenien
c1af05fbbe fix(ui): fix metrics table width, fix reload pathing 2022-05-17 17:57:03 +02:00
sylenien
25f792edc2 fix(ui): fix dashboard pinning and state updating; fix menu items naming 2022-05-17 17:57:03 +02:00
sylenien
9960927ca0 fix(ui): fix show more button for metric adding 2022-05-17 17:57:03 +02:00
sylenien
14ef2cba26 fix(ui): fix tooltip behavior on a metric widget 2022-05-17 17:57:03 +02:00
sylenien
30add0fd3c fix(ui): rm consolelog 2022-05-17 17:57:03 +02:00
sylenien
749093d9f6 fix(ui): fix routing in dashboards 2022-05-17 17:57:03 +02:00
rjshrjndrn
d7037771ed chore(helmcharts): adding heuristics service
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-17 12:15:52 +02:00
sylenien
0617e8b485 fix(ui): fix icons generation script to properly trim svg attrs 2022-05-17 11:19:42 +02:00
sylenien
536bacad64 fix(ui): rm conflicting code 2022-05-17 11:19:42 +02:00
sylenien
a3aecae559 fix(ui): fix text on widget updates, remove back link on metrics page and add brdcmbs 2022-05-17 11:19:42 +02:00
sylenien
33ff7914be fix(ui): remove state updates on unmounted components 2022-05-17 11:19:42 +02:00
sylenien
cba53fa284 fix(ui): fix comments in iconsjs 2022-05-17 11:19:42 +02:00
sylenien
a2c999ccef fix(ui): fix weird wording, bug with svg 2022-05-17 11:19:42 +02:00
sylenien
fec8b9e13c fix(ui): fix clipping bg on hover, fix side menu header 2022-05-17 11:19:42 +02:00
sylenien
8a29f8ecf4 fix(ui): wording, keys warnings 2022-05-17 11:19:42 +02:00
sylenien
bb33ea4714 fix(ui): lettering fixes, move create dashboard to sidebar title 2022-05-17 11:19:42 +02:00
sylenien
5c7f6c1738 fix(ui): fix messages for empty dashboard 2022-05-17 11:19:42 +02:00
rjshrjndrn
f66e780596 chore(ingress): changing proxy body size to 10m
else nginx will reject the request, and AWS will report it as a CORS issue (see the example below).

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-16 21:14:14 +02:00
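For reference, the same 10m limit can be applied by hand with the standard ingress-nginx annotation; the ingress name and namespace below are placeholders, and in the chart this is driven through values rather than kubectl:

# Raise the allowed request body size on an existing ingress object
kubectl -n app annotate ingress openreplay-ingress \
  nginx.ingress.kubernetes.io/proxy-body-size=10m --overwrite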
Alex Kaminskii
8ff0249814 few files to ts 2022-05-16 20:25:15 +02:00
Alex Kaminskii
d495f1aa97 style(player): few renamings 2022-05-16 20:02:28 +02:00
Alex Kaminskii
7929a8ceca refactor(player): move lists to separate file + renaming 2022-05-16 19:55:45 +02:00
Shekar Siri
82ad650f0c feat(ui) - sessions - widget 2022-05-16 19:11:53 +02:00
Alexander Zavorotynskiy
94c56205b9 fix(backend): added error log in kafka producer 2022-05-16 18:56:43 +02:00
Taha Yassine Kraiem
f054b130bf feat(DB): changed metrics category from Overview to Monitoring Essentials 2022-05-16 18:24:16 +02:00
Shekar Siri
acdd3596bc fix(ui) - assist reload remove click event params 2022-05-16 17:05:23 +02:00
Shekar Siri
f1d94c5378 feat(ui) - errors - widget 2022-05-16 17:04:10 +02:00
Shekar Siri
baa6c916dc feat(ui) - funnels - filter dropdowns to select 2022-05-16 16:26:16 +02:00
Alex Kaminskii
76d9d41ed8 refactor(backend/storage): pass FileSplitSize as env var 2022-05-16 15:31:37 +02:00
Alex Kaminskii
7d7dcc2910 chore (backend): Dockerfile.bundle update 2022-05-16 15:28:56 +02:00
rjshrjndrn
3b704b9430 fix(helm): nginx forward L7 headers from LB
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-16 15:02:59 +02:00
Alexander Zavorotynskiy
f681e85e50 fix(backend): removed temp Dockerfile from cmd dir 2022-05-16 15:01:12 +02:00
sylenien
90299d9d6d fix(ui): rm consolelog 2022-05-16 14:53:40 +02:00
sylenien
09056c103c feat(ui): moved saved search list to new modal component 2022-05-16 14:53:40 +02:00
sylenien
69b75f5b56 fix(ui): various small ui fixes for buttons 2022-05-16 14:53:40 +02:00
sylenien
e5842939db feat(ui): added success notif for settings updates 2022-05-16 14:53:40 +02:00
sylenien
387e946dfe fix(ui): removed popup from country flag component; added bg to toggler head 2022-05-16 14:53:40 +02:00
sylenien
e1ae8bae20 fix(ui): removed popup from country flag component 2022-05-16 14:53:40 +02:00
sylenien
ac7a70ea62 fix(ui): fixed search bar to properly include sections and filters 2022-05-16 14:53:40 +02:00
Alexander Zavorotynskiy
0028de2d11 fix(backend): removed service dir from Dockerfile 2022-05-16 14:50:32 +02:00
Alex K
22606aca62
Merge pull request #475 from openreplay/integrations_refactoring
Integrations to golang standard file structure
2022-05-16 14:48:00 +02:00
Alex Kaminskii
e26ce2e963 fix(backend-ee/clickhouse): do not insert method & status into resources as they are always unknown 2022-05-16 14:41:44 +02:00
Alexander Zavorotynskiy
3511534cbb feat(backend/integrations): service refactoring 2022-05-16 14:41:12 +02:00
Shekar Siri
97da3f5c1c Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-05-16 14:38:00 +02:00
Shekar Siri
ebbc9cc984 fix(ui) - alert form footer bg 2022-05-16 14:18:52 +02:00
Alex K
d996b14ff8
Merge pull request #474 from openreplay/assets_refactoring
* Assets to golang standard file structure
2022-05-16 14:18:04 +02:00
Alexander Zavorotynskiy
3449440de3 feat(backend/assets): service refactoring 2022-05-16 14:12:37 +02:00
Shekar Siri
d36d4862cf fix(ui) - chart y axis numbers 2022-05-16 14:12:16 +02:00
Alexander
356bf32bfc
Merge pull request #473 from openreplay/storage_refactoring
Storage refactoring
2022-05-16 12:56:22 +02:00
Alexander Zavorotynskiy
24f64af95a feat(backend/storage): service refactoring 2022-05-16 12:52:43 +02:00
rjshrjndrn
4175d98be8 chore(helmcharts): adding clickhouse operator helm chart
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-16 12:52:43 +02:00
rjshrjndrn
c94f4074bb chore(helm): make ingress-nginx installation not mandatory.
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-16 10:48:35 +02:00
Taha Yassine Kraiem
c84d39d38e feat(api): upgraded python base image
feat(alerts): upgraded python base image
2022-05-13 19:15:31 +02:00
Shekar Siri
05bd61b83c feat(ui) - funnels - issues sort 2022-05-13 19:03:01 +02:00
Alexander
a69f3f0e83
Merge pull request #459 from openreplay/ender_refactoring
Ender refactoring
2022-05-13 17:32:14 +02:00
Alexander Zavorotynskiy
44dae11886 feat(backend/db): fixed ee version 2022-05-13 17:00:09 +02:00
Shekar Siri
3baa3ea9a5 Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-05-13 16:05:30 +02:00
Shekar Siri
f6bd3dd0dd feat(ui) - funnels - details wip 2022-05-13 16:05:11 +02:00
sylenien
58397e6c6c fix(ui): remove attrs from icons 2022-05-13 15:56:26 +02:00
sylenien
d72f47b296 fix(ui): fix prop types for sessionitem 2022-05-13 15:56:26 +02:00
sylenien
cea1218613 fix(ui): fix typo in comment 2022-05-13 15:56:26 +02:00
sylenien
e7a31dbb8c fix(ui): refactor sessionitem 2022-05-13 15:56:26 +02:00
sylenien
19178807f8 fix(ui): fixed sessionitem types and removed withrouter connection 2022-05-13 15:56:26 +02:00
sylenien
be13ff5f7a fix(ui): fixed sessionitem and timezone dropdown connection to mobx 2022-05-13 15:56:26 +02:00
sylenien
0d00cf0349 more search field fixes 2022-05-13 15:56:26 +02:00
sylenien
64ebd07e57 added toggler disabled colors, visibility default values, no items warning text to search field 2022-05-13 15:56:26 +02:00
sylenien
1529510d25 removed browser autocomplete from filter inputs, removed timezone picker from main page 2022-05-13 15:56:26 +02:00
sylenien
1f0fb80024 fix category and filters naming, add underline to username hover, fix small bugs 2022-05-13 15:56:26 +02:00
sylenien
7005c046b8 fix ui bugs in session tab 2022-05-13 15:56:26 +02:00
Taha Yassine Kraiem
839f4c0927 feat(api): fixed CH client format 2022-05-13 15:49:17 +02:00
Shekar Siri
fd68f7b576 feat(ui) - funnels - path changes 2022-05-13 13:07:35 +02:00
Shekar Siri
87f42b4a79 feat(ui) - funnels - sub details view 2022-05-13 12:35:55 +02:00
Shekar Siri
95f0649ccb Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-05-13 11:27:53 +02:00
Shekar Siri
923fce97fb change(ui) - validation based on ee 2022-05-13 11:26:36 +02:00
Shekar Siri
8c7cbbb189 Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-05-13 11:22:53 +02:00
Shekar Siri
34947d8ef7 change(ui) - validation based on ee 2022-05-13 11:19:23 +02:00
Shekar Siri
a88763d0eb feat(ui) - funnels - issues list 2022-05-13 11:13:55 +02:00
Alexander
4ac3da241e
Merge branch 'dev' into ender_refactoring 2022-05-12 17:16:45 +02:00
Taha Yassine Kraiem
ac4e32aba3 feat(DB): changed partition expression 2022-05-12 16:24:58 +02:00
Shekar Siri
6a1e72e1d5 feat(ui) - funnels - issues list 2022-05-12 15:15:56 +02:00
Alex K
4f1a686787
Merge pull request #453 from openreplay/sink_refactor
Sink refactor

* structure -> go standards
* move URLrewrite to sink (free http from encoding-decoding)
2022-05-12 15:03:32 +02:00
Shekar Siri
8584cf74cb feat(ui) - funnels - tailwind config 2022-05-12 14:32:04 +02:00
Shekar Siri
f40403f4e9 feat(ui) - funnels - issues filters 2022-05-12 14:31:44 +02:00
Shekar Siri
8e1bb95c84 feat(ui) - funnels - issues filters 2022-05-12 12:55:34 +02:00
Alexander Zavorotynskiy
ae6af1449c feat(backend-db/heuristics): fixed errors in main files 2022-05-12 09:59:09 +02:00
ShiKhu
883f7eab8a fix(tracker-assist):3.5.9: enforce peerjs@1.3.2 2022-05-11 23:53:19 +02:00
Alex Kaminskii
88bec7ab60 refactor(): separate ieBuilder, peBuilder & networkIssueDeterctor from EventMapper 2022-05-11 21:27:18 +02:00
Alex Kaminskii
6d2bfc0e77 fix(backend/internals): builder codefix 2022-05-11 21:25:41 +02:00
Alex Kaminskii
85b87e17df refactor(backend/internals): builder: message order & timestamps check 2022-05-11 21:14:23 +02:00
Alex Kaminskii
a6f8857b89 refactor-fix(backend-heuristics/db): create handlers for each session separately 2022-05-11 19:04:14 +02:00
Alex Kaminskii
e65fa58ab5 refactor(backend-internal): dry builder 2022-05-11 18:51:55 +02:00
Alex Kaminskii
17d477fc43 fix+style(tracker):3.5.11 fix build & files structure 2022-05-11 18:27:18 +02:00
Alex Kaminskii
396f1a16af refactor(backend-sink): producer close timeout value to config 2022-05-11 17:36:35 +02:00
Shekar Siri
a8fbf50a49 feat(ui) - funnels - issues sort 2022-05-11 17:12:33 +02:00
Alexander Zavorotynskiy
c77966a789 feat(backend/handlers): removed unix timestamp from header builders 2022-05-11 16:45:31 +02:00
Alex Kaminskii
ebc0185806 style(backend-http): split core and local imports 2022-05-11 16:37:49 +02:00
Alex Kaminskii
6456520587 style(backend-http): use UnixMilli 2022-05-11 16:36:31 +02:00
Alex Kaminskii
a241830e71 refactor(backend-sink/http): move URLrewriter to sink 2022-05-11 16:32:27 +02:00
Alex Kaminskii
ea2d13dac6 chore(backend-sink): sink in cmd 2022-05-11 16:27:01 +02:00
Shekar Siri
467e99d90d merge dev changes 2022-05-11 16:16:59 +02:00
Shekar Siri
f5d154bfc2 npm updates 2022-05-11 16:13:26 +02:00
Shekar Siri
bec68eb375 feat(ui) - funnels - issues 2022-05-11 16:13:01 +02:00
Shekar Siri
34425b8b02 feat(ui) - funnels - check for table and funnel 2022-05-10 19:25:08 +02:00
Shekar Siri
9ecb4c369e feat(ui) - funnels - step percentage dynamic 2022-05-10 18:03:19 +02:00
Shekar Siri
0174e265e0 feat(ui) - funnels - step percentage 2022-05-10 17:50:50 +02:00
Shekar Siri
d619083a85 feat(ui) - funnels - step toggle 2022-05-10 17:37:27 +02:00
Shekar Siri
3bb5d9fabd feat(ui) - funnels - graph 2022-05-10 17:17:15 +02:00
Taha Yassine Kraiem
efec096ffe feat(api): fixed sourcemaps reader endpoint 2022-05-10 17:13:19 +02:00
Shekar Siri
5f64bc90dc
Merge pull request #452 from openreplay/audit
Audit Trails
2022-05-10 17:08:21 +02:00
Alexander Zavorotynskiy
26e23d594f feat(backend/handlers): refactored web and ios message handlers 2022-05-10 15:40:55 +02:00
Alexander Zavorotynskiy
47007eb9d7 feat(backend/db): prepared db service for refactoring 2022-05-10 14:11:41 +02:00
Shekar Siri
89db14bdbf feat(ui) - funnels - merged dev 2022-05-10 12:10:18 +02:00
Shekar Siri
eae31eac37 feat(ui) - audit - date 2022-05-09 19:34:59 +02:00
Shekar Siri
5b627c17ec feat(ui) - audit - daterange with new component 2022-05-09 19:02:07 +02:00
Alexander Zavorotynskiy
ca9d76624b feat(backend/heuristics): message handlers refactoring 2022-05-09 16:51:10 +02:00
Taha Yassine Kraiem
d3be02fd9d feat(api): user trail limit changed 2022-05-09 15:30:28 +02:00
Alex Kaminskii
ae4c6e5cad refactor(backend-sink): to go standards 2022-05-07 23:52:48 +02:00
Alex Kaminskii
324ee0890e chore(backend): enforce amd64 build (for build on arm mac) 2022-05-07 23:21:30 +02:00
Alex Kaminskii
71d50e5a44 refactor(backend-messages):predefined TypeID() on message type 2022-05-07 23:19:49 +02:00
Alex Kaminskii
e4d45e88f9 chore(backend): name entrypoint container 2022-05-07 23:00:00 +02:00
Alex Kaminskii
6ab6d342c0 chore(backend-heuristics/db): remove redundant 2022-05-07 22:16:15 +02:00
Alex Kaminskii
62b36bd70a refactor(backend-heuristics): bring all sub-builders to common interface 2022-05-07 21:29:40 +02:00
Alex Kaminskii
432c0da4e2 chore(backend-heuristics): Remove redundant lines 2022-05-07 15:10:46 +02:00
Shekar Siri
b97c32ad56 feat(ui) - audit - filters 2022-05-06 18:54:25 +02:00
Taha Yassine Kraiem
7625eb9f8c feat(alerts): changed Dockerfile.alerts 2022-05-06 18:36:46 +02:00
Taha Yassine Kraiem
202bf73456 feat(api): vault support 2022-05-06 18:30:59 +02:00
Taha Yassine Kraiem
516e5b0446 feat(api): changed search user trails by username 2022-05-06 17:43:55 +02:00
Shekar Siri
7feaa376e6 feat(ui) - audit - list and search 2022-05-06 17:31:35 +02:00
Taha Yassine Kraiem
d8078c220d feat(api): search user trails by username
feat(db): index to search user trails by username
2022-05-06 17:27:43 +02:00
Alexander Zavorotynskiy
8c432b8ba3 Removed extra logic from heuristics 2022-05-06 16:39:29 +02:00
Alexander Zavorotynskiy
967034a89c Create first version of heuristics service with the same logic as old ender 2022-05-06 16:12:06 +02:00
Taha Yassine Kraiem
ec445f88c7 feat(api): EE updated authorizer 2022-05-06 15:09:50 +02:00
Alexander Zavorotynskiy
2b3728d8da Finished refactoring for session ender service 2022-05-06 12:21:43 +02:00
Taha Yassine Kraiem
0c84c89b4f feat(api): changed Dockerfile 2022-05-06 12:16:07 +02:00
Taha Yassine Kraiem
50b476316a feat(api): changed root path 2022-05-06 12:11:38 +02:00
Taha Yassine Kraiem
ac9c10393f feat(api): fixed return createdAt with the list of users 2022-05-06 12:07:03 +02:00
Shekar Siri
f12931491a feat(ui) - audit - base views 2022-05-06 12:06:55 +02:00
Taha Yassine Kraiem
ef0edebb3d feat(DB): traces/trails index
feat(api): get all possible traces/trails actions
feat(api): search traces/trails by actions
feat(api): search traces/trails by user
2022-05-06 11:56:03 +02:00
Alex Kaminskii
a99f684b83 feat(frontend-player): sequential (pre)load for multifile sessions 2022-05-06 00:10:08 +02:00
Alex Kaminskii
2d96705930 readme(tracker): build-readme for js packages 2022-05-06 00:07:07 +02:00
Taha Yassine Kraiem
21d8d28a79 feat(api): return createdAt with the list of users 2022-05-05 20:42:08 +02:00
Taha Yassine Kraiem
acaef59590 feat(DB): traces/trails index
feat(api): get all traces/trails
2022-05-05 20:37:37 +02:00
Taha Yassine Kraiem
172508dcf3 feat(DB): changed sessions_metadata sort expression 2022-05-05 18:21:47 +02:00
Alexander Zavorotynskiy
f4212d6eaa Split ender into 2 services (ender and heuristics) 2022-05-05 17:37:05 +02:00
Shekar Siri
bd07d42084 Merge branch 'user-list' into dev 2022-05-05 17:07:36 +02:00
Shekar Siri
b77771ccca change(ui) - user list checking for enterprise 2022-05-05 17:07:16 +02:00
Shekar Siri
17aec98298
Merge pull request #447 from openreplay/user-list
UI Improvements - User, Projects
2022-05-05 16:32:20 +02:00
Shekar Siri
bb1afdc76e fix(ui) - errors viewed state 2022-05-05 16:29:55 +02:00
Alexander Zavorotynskiy
700ef0dcc6 Made standard project layout for ender service 2022-05-05 15:26:10 +02:00
rjshrjndrn
b843aba08a chore(init): create directory if it does not exist
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-05 15:03:31 +02:00
Shekar Siri
59fe8245dd change(ui) - user list tooltip 2022-05-05 14:41:10 +02:00
Shekar Siri
c4b371507d change(ui) - project delete moved to modal 2022-05-05 14:31:53 +02:00
Shekar Siri
c3bb5aeb07 change(ui) - sites search 2022-05-05 13:27:06 +02:00
Shekar Siri
55b64128f1 change(ui) - sites checking for exists 2022-05-05 13:16:06 +02:00
Shekar Siri
dfce25709a change(ui) - user limit check and other fixes 2022-05-05 13:11:20 +02:00
Alex K
50bbd0fe98
Merge pull request #445 from openreplay/db_refactoring
Db refactoring
2022-05-05 12:50:40 +02:00
Alex Kaminskii
b6d57b45ab chore(github-workflow): backend 2022-05-05 12:49:44 +02:00
Alexander Zavorotynskiy
88306e1a6a fix (backend): removed unused import in storage module 2022-05-05 12:04:23 +02:00
Alexander Zavorotynskiy
74756b2409 Refactoring of the db service 2022-05-05 10:46:48 +02:00
Alexander Zavorotynskiy
c050394116 Moved service configs to config module 2022-05-05 10:23:36 +02:00
Shekar Siri
918f7e9d86 change(ui) - user delete 2022-05-05 10:09:16 +02:00
Alexander Zavorotynskiy
167d1e117e Made correct project layout 2022-05-05 09:45:38 +02:00
Alex Kaminskii
6314fcbbef feat(backend): 2 files back compatible format 2022-05-04 20:33:52 +02:00
Shekar Siri
330992736d change(ui) - user form role filter 2022-05-04 19:35:04 +02:00
Shekar Siri
7e655d513c change(ui) - userlist form 2022-05-04 18:53:43 +02:00
Shekar Siri
5ef382c9b8 Merge branch 'dev' of github.com:openreplay/openreplay into user-list 2022-05-04 16:42:45 +02:00
Shekar Siri
c15648eaf7 change(ui) - tailwind justify-self 2022-05-04 16:41:44 +02:00
Shekar Siri
c97fe55cda change(ui) - users list - form 2022-05-04 16:41:29 +02:00
Alexander Zavorotynskiy
5b7c479f4d Refactoring in stats logger 2022-05-04 16:17:57 +02:00
Taha Yassine Kraiem
42f3b6d018 feat(api): changed Dockerfile 2022-05-04 14:50:09 +02:00
Taha Yassine Kraiem
8d5cf84d90 feat(api): changed Dockerfile 2022-05-04 14:36:52 +02:00
Alexander Zavorotynskiy
74672d4321 Removed unused code 2022-05-04 14:36:42 +02:00
Taha Yassine Kraiem
47be240dfb feat(api): changed Dockerfile 2022-05-04 14:32:17 +02:00
Alexander Zavorotynskiy
9cdb1e8ab7 Removed global pg connection 2022-05-04 14:21:15 +02:00
Taha Yassine Kraiem
36b466665c feat(api): changed replay file URL 2022-05-04 13:14:25 +02:00
Shekar Siri
424b071eaf change(ui) - users list - search and pagination 2022-05-04 13:14:20 +02:00
Taha Yassine Kraiem
f90a25c75a feat(api): EE updated dependencies 2022-05-04 13:10:48 +02:00
Taha Yassine Kraiem
144e58adef feat(api): updated dependencies 2022-05-04 13:00:40 +02:00
Shekar Siri
7d08e32d25 change(ui) - users list 2022-05-04 12:27:44 +02:00
Alexander Zavorotynskiy
a4278aec23 [http] removed extra log in main.go
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-04 12:17:33 +02:00
rjshrjndrn
767fa31026 chore(actions): include cmd dir for build
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-04 12:09:46 +02:00
rjshrjndrn
b72a332cd0 chore(build): returning from function
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-04 11:38:23 +02:00
Alex Kaminskii
82084c9717 fix (backend): build.sh build_service encapsulation 2022-05-04 11:23:38 +02:00
Alexander
15563ca582
Merge pull request #442 from openreplay/http_refactoring
Http service refactoring
2022-05-04 10:10:07 +02:00
rjshrjndrn
42e6a63e44 docs(vagrant): create user account comment
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-05-03 21:43:46 +02:00
Alexander Zavorotynskiy
414fbee962 Fixed build.sh file 2022-05-03 13:55:56 +02:00
Shekar Siri
0bbd27e856
Merge pull request #441 from openreplay/session-settings
Session settings
2022-05-03 12:50:54 +02:00
Shekar Siri
690577407d feat(ui) - session settings - cleanup 2022-05-03 12:34:58 +02:00
Alexander Zavorotynskiy
b2456e9ac6 Removed debug lines from build.sh 2022-05-03 12:33:43 +02:00
Shekar Siri
18e932e5e9 feat(ui) - session settings - capture rate api update 2022-05-03 12:26:42 +02:00
Alexander Zavorotynskiy
18d18164b3 Added temporary hack for http service building 2022-05-03 10:42:24 +02:00
Alexander Zavorotynskiy
d02ecba354 Added missed return statements 2022-05-02 17:38:53 +02:00
Alexander Zavorotynskiy
5ec46ad753 Moved assets cache logic 2022-05-02 17:36:33 +02:00
Shekar Siri
87f76f484d feat(ui) - session settings - changed state 2022-05-02 16:31:19 +02:00
Shekar Siri
d2f168f667 remote pull dev 2022-05-02 16:27:53 +02:00
Shekar Siri
02c39199d2 feat(ui) - session settings - changed state 2022-05-02 16:26:05 +02:00
Shekar Siri
e421511db8 feat(ui) - session settings - libs 2022-05-02 16:07:12 +02:00
Shekar Siri
a1b656dc6a feat(ui) - session settings - ui and state 2022-05-02 16:07:00 +02:00
Alexander Zavorotynskiy
69cabaecfe Moved the rest of the code to separate dirs 2022-05-02 15:28:51 +02:00
Alexander Zavorotynskiy
df722761e5 Moved server to a separate dir 2022-05-02 15:20:10 +02:00
Alexander Zavorotynskiy
c347198fc1 Moved http handlers to a separate dir 2022-05-02 15:05:45 +02:00
Alexander Zavorotynskiy
f01ef3ea03 Made a correct project structure for http service 2022-05-02 14:47:13 +02:00
Alexander Zavorotynskiy
66e190221d Removed global objects (moved service initialization into serviceBuilder) 2022-05-02 14:36:02 +02:00
Taha Yassine Kraiem
b87e601f27 chore(vagrant): Changed development.md
chore(vagrant): Added dev setup-scripts for EE
2022-05-02 11:33:39 +02:00
Rajesh Rajendran
867f92dfc7 Update development.md 2022-04-30 18:07:45 +02:00
Taha Yassine Kraiem
6807dc8ce1 feat(api): EE optimized get error details 2022-04-29 18:52:29 +02:00
Alexander Zavorotynskiy
b0bb5bd922 Moved configuration to the separate file 2022-04-29 17:23:20 +02:00
Alexander Zavorotynskiy
10edeb6e2d Refactoring of http handlers 2022-04-29 16:53:28 +02:00
Shekar Siri
27641279b4
Update dashboard.ts 2022-04-29 16:10:03 +02:00
Taha Yassine Kraiem
423f416015 feat(api): fixed description optional value 2022-04-29 16:08:38 +02:00
Shekar Siri
4f1a476c65
Update dashboard.ts 2022-04-29 16:02:14 +02:00
Shekar Siri
6a855a947c
Merge pull request #435 from openreplay/reporting
Dashboard - Report Generation
2022-04-29 15:36:06 +02:00
Shekar Siri
8986f395b1 feat(ui) - dashboard - new libs 2022-04-29 14:27:23 +02:00
Taha Yassine Kraiem
84a43bcd8b feat(api): fixed description default value 2022-04-29 14:16:36 +02:00
Shekar Siri
7c2539ec93 feat(ui) - dashboard - report 2022-04-29 14:16:29 +02:00
Taha Yassine Kraiem
fff8f75fd0 feat(api): changed Dockerfile 2022-04-29 14:06:06 +02:00
Taha Yassine Kraiem
63e897594f feat(db): EE fixed widget-size for upgrade 2022-04-29 14:06:06 +02:00
rjshrjndrn
31f9e49673 chore(vagrant): Adding development readme
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-29 14:06:06 +02:00
ShiKhu
6412c2a862 fix(backend/storage): codefix 2022-04-29 14:06:06 +02:00
ShiKhu
1e5deed0d5 feat(backend/storage):split files into 2 2022-04-29 14:06:06 +02:00
Alexander Zavorotynskiy
0bbf8012f1 fix(backend): added missed return in error case 2022-04-29 14:06:06 +02:00
Alexander Zavorotynskiy
9856e36f44 fix(backend): fixed possible panic in the defer 2022-04-29 14:06:06 +02:00
ShiKhu
d699341676 fix(backend): Dockerfile.bundle fix 2022-04-29 14:06:06 +02:00
ShiKhu
fbb039f0c7 fix(backend):pprof launch addr: use port only 2022-04-29 14:06:06 +02:00
ShiKhu
1b93f8a453 gofmt 2022-04-29 14:06:06 +02:00
rjshrjndrn
bdb6a75d7c fix(nginx): proper x-forward-for proxying
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-29 14:06:06 +02:00
Rajesh Rajendran
4f44edeb39 Vagrant for local contribution (#434)
* chore(vagrant): initial vagrantfile
* chore(vagrant): adding instructions after installation
* chore(vagrant): Adding vagrant user to docker group
* chore(vagrant): use local docker daemon for k3s
* chore(vagrant): fix comment
* chore(vagrant): adding hostname in /etc/hosts
* chore(vagrant): fix doc
* chore(vagrant): limiting cpu
* chore(frontend): initialize dev env
* chore(docker): adding dockerignore
* chore(dockerfile): using cache for fasten build
* chore(dockerignore): update
* chore(docker): build optimizations
* chore(build): all components build option
* chore(build): utilities build fix
* chore(scrpt): remove debug message
* chore(vagrant): provision using stable branch always

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-29 14:06:06 +02:00
Taha Yassine Kraiem
8fa4632ee4 feat(alerts): changed build script 2022-04-29 14:06:06 +02:00
Shekar Siri
59f51cde26 feat(ui) - dashboard - report 2022-04-29 13:56:20 +02:00
Taha Yassine Kraiem
35b9d6ebaf feat(api): s3 helper detect environment
feat(api): support description for dashboards
2022-04-29 13:40:57 +02:00
Shekar Siri
a87717ba8c feat(ui) - dashboard - report 2022-04-29 13:37:30 +02:00
Taha Yassine Kraiem
122705b4c7 feat(db): EE fixed widget-size for upgrade 2022-04-29 13:19:11 +02:00
Shekar Siri
878c742c2f feat(ui) - dashboard - report 2022-04-29 12:32:34 +02:00
rjshrjndrn
89ba052d41 chore(vagrant): Adding development readme
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-29 12:17:01 +02:00
Alexander Zavorotynskiy
dc69131499 Deleted commented (unused) code 2022-04-29 11:22:00 +02:00
Shekar Siri
b096ac73d1 feat(ui) - dashboard - report 2022-04-29 10:02:56 +02:00
ShiKhu
cb01c3cb28 fix(backend/storage): codefix 2022-04-28 19:21:45 +02:00
ShiKhu
6d4800feea feat(backend/storage):split files into 2 2022-04-28 19:14:23 +02:00
Alexander Zavorotynskiy
de3ba9c7f6 fix(backend): added missed return in error case 2022-04-28 18:02:56 +02:00
Alexander Zavorotynskiy
3132db6205 fix(backend): fixed possible panic in the defer 2022-04-28 17:55:56 +02:00
ShiKhu
c2d1bcdb35 Merge branch 'backend' into dev 2022-04-28 17:03:25 +02:00
ShiKhu
60d0d42d69 fix(backend): Dockerfile.bundle fix 2022-04-28 17:02:53 +02:00
ShiKhu
d64cd12eb6 fix(backend):pprof launch addr: use port only 2022-04-28 17:02:13 +02:00
Taha Yassine Kraiem
1a73b978dc feat(db): EE remove pages_count column 2022-04-28 15:29:45 +02:00
Taha Yassine Kraiem
b8367d87f8 feat(api): EE fixed No of pages count widget 2022-04-28 14:59:22 +02:00
Taha Yassine Kraiem
aef7026034 feat(api): EE fixed No of pages count widget 2022-04-28 14:59:05 +02:00
Taha Yassine Kraiem
51c75657ab feat(api): EE fixed No of pages count widget 2022-04-28 14:08:23 +02:00
Taha Yassine Kraiem
f8f70b1006 feat(api): EE fixed No of pages count widget 2022-04-28 14:07:28 +02:00
rjshrjndrn
94adb69f6b fix(nginx): proper x-forward-for proxying
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-27 15:00:54 +02:00
Rajesh Rajendran
f3b6bda163
Vagrant for local contribution (#434)
* chore(vagrant): initial vagrantfile
* chore(vagrant): adding instructions after installation
* chore(vagrant): Adding vagrant user to docker group
* chore(vagrant): use local docker daemon for k3s
* chore(vagrant): fix comment
* chore(vagrant): adding hostname in /etc/hosts
* chore(vagrant): fix doc
* chore(vagrant): limiting cpu
* chore(frontend): initialize dev env
* chore(docker): adding dockerignore
* chore(dockerfile): using cache for fasten build
* chore(dockerignore): update
* chore(docker): build optimizations
* chore(build): all components build option
* chore(build): utilities build fix
* chore(scrpt): remove debug message
* chore(vagrant): provision using stable branch always

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2022-04-27 12:54:40 +00:00
Taha Yassine Kraiem
72bee8e894 feat(api): round time metrics 2022-04-26 18:10:25 +02:00
Taha Yassine Kraiem
55b504cc22 feat(alerts): changed build script 2022-04-26 16:30:48 +02:00
Taha Yassine Kraiem
f57bf7205c feat(assist): EE fixed geoip-unknown ip 2022-04-26 12:47:18 +02:00
Taha Yassine Kraiem
1832567beb feat(assist): fixed geoip-unknown ip 2022-04-26 12:44:07 +02:00
ShiKhu
43669c082c gofmt 2022-04-25 23:09:52 +02:00
Shekar Siri
53ac4c3321 Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-04-25 12:07:19 +02:00
Shekar Siri
fb44ff70fe feat(ui) - funnels wip 2022-04-22 19:07:01 +02:00
Shekar Siri
eeebe11915 Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-04-22 16:10:44 +02:00
Shekar Siri
4907c1b26c feat(ui) - funnels listing 2022-04-22 14:47:38 +02:00
Shekar Siri
a287a9ca47 Merge branch 'dev' of github.com:openreplay/openreplay into funnels 2022-04-22 12:47:03 +02:00
Shekar Siri
3882128d4a feat(ui) - funnels - wip 2022-04-21 16:52:01 +02:00
Shekar Siri
45e39c8749 feat(ui) - funnels - wip 2022-04-20 18:05:10 +02:00
1790 changed files with 52109 additions and 92963 deletions

View file

@@ -1,10 +1,12 @@
 # This action will push the chalice changes to aws
 on:
+  workflow_dispatch:
   push:
     branches:
       - dev
     paths:
       - ee/api/**
+      - api/**
 name: Build and Deploy Chalice EE
@@ -31,27 +33,64 @@ jobs:
         kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
       id: setcontext
+    # Caching docker images
+    - uses: satackey/action-docker-layer-caching@v0.0.11
+      # Ignore the failure of a step and avoid terminating the job.
+      continue-on-error: true
     - name: Building and Pusing api image
       id: build-image
       env:
         DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-        IMAGE_TAG: ee-${{ github.sha }}
+        IMAGE_TAG: ${{ github.sha }}-ee
         ENVIRONMENT: staging
       run: |
        cd api
        PUSH_IMAGE=1 bash build.sh ee
+    - name: Creating old image input
+      run: |
+        #
+        # Create yaml with existing image tags
+        #
+        kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+        tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+        echo > /tmp/image_override.yaml
+        for line in `cat /tmp/image_tag.txt`;
+        do
+          image_array=($(echo "$line" | tr ':' '\n'))
+          cat <<EOF >> /tmp/image_override.yaml
+        ${image_array[0]}:
+          image:
+            # We've to strip off the -ee, as helm will append it.
+            tag: `echo ${image_array[1]} | cut -d '-' -f 1`
+        EOF
+        done
     - name: Deploy to kubernetes
       run: |
-        cd scripts/helm/
-        sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-        sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml
-        sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
-        sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-        sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-        bash kube-install.sh --app chalice
+        cd scripts/helmcharts/
+        ## Update secerts
+        sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
+        sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+        sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
+        sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
+        sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
+        sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
+        # Update changed image tag
+        sed -i "/chalice/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+        cat /tmp/image_override.yaml
+        # Deploy command
+        helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
       env:
         DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-        IMAGE_TAG: ee-${{ github.sha }}
+        # We're not passing -ee flag, because helm will add that.
+        IMAGE_TAG: ${{ github.sha }}
         ENVIRONMENT: staging
@@ -59,6 +98,6 @@ jobs:
 #    - name: Debug Job
 #      uses: mxschmitt/action-tmate@v3
 #      env:
 #        DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-#        IMAGE_TAG: ee-${{ github.sha }}
+#        IMAGE_TAG: ${{ github.sha }}-ee
 #        ENVIRONMENT: staging
 #
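For orientation, the override file produced by the "Creating old image input" step and patched by the chalice sed line above ends up looking roughly like this (the service names and tags here are made-up examples); it is then passed to helm via the extra -f /tmp/image_override.yaml flag:

# Example of a generated /tmp/image_override.yaml (illustrative values only)
cat > /tmp/image_override.yaml <<'EOF'
chalice:
  image:
    tag: 0b0798b0ef
frontend:
  image:
    tag: 767376a8db
EOF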

View file

@@ -3,6 +3,7 @@ on:
   workflow_dispatch:
   push:
     branches:
+      - dev
       - api-v1.5.5
     paths:
       - api/**
@@ -32,6 +33,12 @@ jobs:
         kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
       id: setcontext
+    # Caching docker images
+    - uses: satackey/action-docker-layer-caching@v0.0.11
+      # Ignore the failure of a step and avoid terminating the job.
+      continue-on-error: true
     - name: Building and Pusing api image
       id: build-image
       env:
@@ -41,15 +48,43 @@ jobs:
       run: |
        cd api
        PUSH_IMAGE=1 bash build.sh
+    - name: Creating old image input
+      run: |
+        #
+        # Create yaml with existing image tags
+        #
+        kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
+        tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
+        echo > /tmp/image_override.yaml
+        for line in `cat /tmp/image_tag.txt`;
+        do
+          image_array=($(echo "$line" | tr ':' '\n'))
+          cat <<EOF >> /tmp/image_override.yaml
+        ${image_array[0]}:
+          image:
+            tag: ${image_array[1]}
+        EOF
+        done
     - name: Deploy to kubernetes
       run: |
-        cd scripts/helm/
-        sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
-        sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml
-        sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
-        sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
-        sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml
-        bash kube-install.sh --app chalice
+        cd scripts/helmcharts/
+        ## Update secerts
+        sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
+        sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
+        sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
+        sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
+        sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
+        # Update changed image tag
+        sed -i "/chalice/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
+        cat /tmp/image_override.yaml
+        # Deploy command
+        helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
       env:
         DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
         IMAGE_TAG: ${{ github.sha }}

.github/workflows/db-migrate.yaml (new file, 149 lines)
View file

@@ -0,0 +1,149 @@
name: Database migration Deployment
on:
workflow_dispatch:
push:
branches:
- dev
paths:
- ee/scripts/helm/db/init_dbs/**
- scripts/helm/db/init_dbs/**
# Disable previous workflows for this action.
concurrency:
group: ${{ github.workflow }} #-${{ github.ref }}
cancel-in-progress: false
jobs:
db-migration:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Checking whether migration is needed for OSS
id: check-migration
run: |-
[[ `git --no-pager diff --name-only HEAD HEAD~1 | grep -E "scripts/helm/db/init_dbs" | grep -vE ^ee/` ]] || echo "::set-output name=skip_migration_oss::true"
- uses: azure/k8s-set-context@v1
if: ${{ steps.check-migration.outputs.skip_migration_oss != 'true' }}
with:
method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Creating old image input
if: ${{ steps.check-migration.outputs.skip_migration_oss != 'true' }}
run: |
set -x
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
tag: ${image_array[1]}
EOF
done
- name: Deploy to kubernetes foss
if: ${{ steps.check-migration.outputs.skip_migration_oss != 'true' }}
run: |
cd scripts/helmcharts/
## Update secerts
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --atomic --set forceMigration=true --set dbMigrationUpstreamBranch=${IMAGE_TAG}
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
### Enterprise code deployment
- name: cleaning old assets
run: |
rm -rf /tmp/image_*
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontextee
- name: Creating old image input
env:
IMAGE_TAG: ${{ github.sha }}
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We've to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Resetting vars file
run: |
git checkout -- scripts/helmcharts/vars.yaml
- name: Deploy to kubernetes ee
run: |
cd scripts/helmcharts/
## Update secerts
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --atomic --set forceMigration=true --set dbMigrationUpstreamBranch=${IMAGE_TAG}
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# AWS_REGION: eu-central-1
# AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
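
For context, the "Creating old image input" step above snapshots whatever is currently deployed and turns it into a Helm values override, so services that are not rebuilt in this run keep their running tags. A rough sketch of what the generated /tmp/image_override.yaml could end up containing, assuming hypothetical services named chalice and http currently running tag abc1234 (the real keys and tags come from the kubectl jsonpath query):

# Illustrative contents only; actual keys/tags depend on the pods found in the app namespace.
chalice:
  image:
    tag: abc1234
http:
  image:
    tag: abc1234

Because this file is passed to helm after vars.yaml (-f vars.yaml -f /tmp/image_override.yaml), its tags take precedence over the chart defaults for every service it lists.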

View file

@ -3,9 +3,13 @@ on:
workflow_dispatch: workflow_dispatch:
push: push:
branches: branches:
- api-v1.5.5 - dev
paths: paths:
- frontend/** - frontend/**
# Disable previous workflows for this action.
concurrency:
group: ${{ github.workflow }} #-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
build: build:
@ -23,24 +27,137 @@ jobs:
${{ runner.OS }}-build- ${{ runner.OS }}-build-
${{ runner.OS }}- ${{ runner.OS }}-
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1 - uses: azure/k8s-set-context@v1
with: with:
method: kubeconfig method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext id: setcontext
# - name: Install
# run: npm install
- name: Build and deploy - name: Building and Pushing frontend image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
run: | run: |
set -x
cd frontend cd frontend
bash build.sh mv .env.sample .env
cp -arl public frontend docker run --rm -v /etc/passwd:/etc/passwd -u `id -u`:`id -g` -v $(pwd):/home/${USER} -w /home/${USER} --name node_build node:14-stretch-slim /bin/bash -c "yarn && yarn build"
minio_pod=$(kubectl get po -n db -l app.kubernetes.io/name=minio -n db --output custom-columns=name:.metadata.name | tail -n+2) # https://github.com/docker/cli/issues/1134#issuecomment-613516912
echo $minio_pod DOCKER_BUILDKIT=1 docker build --target=cicd -t $DOCKER_REPO/frontend:${IMAGE_TAG} .
echo copying frontend to container. docker tag $DOCKER_REPO/frontend:${IMAGE_TAG} $DOCKER_REPO/frontend:${IMAGE_TAG}-ee
kubectl -n db cp frontend $minio_pod:/data/ docker push $DOCKER_REPO/frontend:${IMAGE_TAG}
rm -rf frontend docker push $DOCKER_REPO/frontend:${IMAGE_TAG}-ee
- name: Creating old image input
run: |
set -x
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
tag: ${image_array[1]}
EOF
done
- name: Deploy to kubernetes foss
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
# Update changed image tag
sed -i "/frontend/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --atomic --set skipMigration=true --no-hooks
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
### Enterprise code deployment
- name: cleaning old assets
run: |
rm -rf /tmp/image_*
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontextee
- name: Creating old image input
env:
IMAGE_TAG: ${{ github.sha }}
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee suffix, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Resetting vars file
run: |
git checkout -- scripts/helmcharts/vars.yaml
- name: Deploy to kubernetes ee
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
# Update changed image tag
sed -i "/frontend/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing the -ee flag, because helm will add it.
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job # - name: Debug Job
# if: ${{ failure() }} # if: ${{ failure() }}
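
The "Update changed image tag" line above relies on the override file having a fixed per-service layout (name, image:, tag:): sed matches the "frontend" key, advances two lines with n;n, and rewrites whatever sits on the tag line. The EE job uses n;n;n because its override carries an extra comment line before the tag. A minimal sketch of the same trick against a throwaway file:

# Throwaway demo file mimicking the generated override layout.
IMAGE_TAG=newsha
cat <<'EOF' > /tmp/demo_override.yaml
frontend:
  image:
    tag: oldsha
EOF
# Match the "frontend" line, skip two lines, replace the third with the new tag.
sed -i "/frontend/{n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/demo_override.yaml
cat /tmp/demo_override.yaml   # the tag line now reads: tag: newsha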

View file

@ -1,5 +1,6 @@
# This action will push the utilities changes to aws # This action will push the utilities changes to aws
on: on:
workflow_dispatch:
push: push:
branches: branches:
- dev - dev

View file

@ -1,11 +1,13 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions # Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on: on:
workflow_dispatch:
push: push:
branches: branches:
- dev - dev
paths: paths:
- ee/backend/** - ee/backend/**
- backend/**
name: Build and deploy workers EE name: Build and deploy workers EE
@ -33,11 +35,16 @@ jobs:
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret. kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext id: setcontext
- name: Build, tag, and Deploy to k8s # # Caching docker images
# - uses: satackey/action-docker-layer-caching@v0.0.11
# # Ignore the failure of a step and avoid terminating the job.
# continue-on-error: true
- name: Build, tag
id: build-image id: build-image
env: env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ee-${{ github.sha }} IMAGE_TAG: ${{ github.sha }}-ee
ENVIRONMENT: staging ENVIRONMENT: staging
run: | run: |
# #
@ -47,35 +54,79 @@ jobs:
# #
# Getting the images to build # Getting the images to build
# #
git diff --name-only HEAD HEAD~1 | grep backend/services | cut -d '/' -f3 | uniq > backend/images_to_build.txt set -x
[[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) {
git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3
git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do
grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3
done
} | uniq > /tmp/images_to_build.txt
[[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0)
# #
# Pushing image to registry # Pushing image to registry
# #
cd backend cd backend
for image in $(cat images_to_build.txt); for image in $(cat /tmp/images_to_build.txt);
do do
echo "Bulding $image" echo "Bulding $image"
PUSH_IMAGE=1 bash -x ./build.sh ee $image PUSH_IMAGE=1 bash -x ./build.sh ee $image
echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG" echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG"
done done
- name: Creating old image input
env:
IMAGE_TAG: ${{ github.sha }}
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploying to kubernetes
env:
# We're not passing the -ee flag, because helm will add it.
IMAGE_TAG: ${{ github.sha }}
run: |
# #
# Deploying image to environment. # Deploying image to environment.
# #
cd ../scripts/helm/ cd scripts/helmcharts/
sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml
sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml ## Update secerts
sed -i "s#jwt_secret_key.*#jwt_secret_key: \"${{ secrets.EE_JWT_SECRET }}\" #g" vars.yaml sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
for image in $(cat ../../backend/images_to_build.txt); sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
## Update images
for image in $(cat /tmp/images_to_build.txt);
do do
sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml sed -i "/${image}/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
# Deploy command
bash openreplay-cli --install $image
done done
cat /tmp/image_override.yaml
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
# - name: Debug Job # - name: Debug Job
# if: ${{ failure() }} # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3 # uses: mxschmitt/action-tmate@v3
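
The "Getting the images to build" block above rebuilds a worker only when its own directory changed, or when it imports a shared package that changed. A toy walk-through of the selection logic with a hypothetical change set (the real workflow takes the list from git diff --name-only HEAD HEAD~1 and greps the backend tree for importers):

# Hypothetical changed files, for illustration only.
changed_files='backend/services/http/main.go
backend/pkg/queue/consumer.go'
# 1. Services or cmds whose own directory changed -> prints "http".
echo "$changed_files" | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3
# 2. Each changed shared package name -> prints "queue"; the workflow then runs
#    grep -rl "pkg/queue" backend/services backend/cmd to add every importer to the build list.
echo "$changed_files" | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq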

View file

@ -1,6 +1,7 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions # Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on: on:
workflow_dispatch:
push: push:
branches: branches:
- dev - dev
@ -32,8 +33,14 @@ jobs:
method: kubeconfig method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext id: setcontext
# Caching docker images
# - uses: satackey/action-docker-layer-caching@v0.0.11
# # Ignore the failure of a step and avoid terminating the job.
# continue-on-error: true
- name: Build, tag, and Deploy to k8s
- name: Build, tag
id: build-image id: build-image
env: env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
@ -47,42 +54,73 @@ jobs:
# #
# Getting the images to build # Getting the images to build
# #
set -x
{ {
git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3
git diff --name-only HEAD HEAD~1 | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do
grep -rl "pkg/$pkg_name" backend/services | cut -d '/' -f3 grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3
done done
} | uniq > backend/images_to_build.txt } | uniq > /tmp/images_to_build.txt
[[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0)
# #
# Pushing image to registry # Pushing image to registry
# #
cd backend cd backend
for image in $(cat images_to_build.txt); for image in $(cat /tmp/images_to_build.txt);
do do
echo "Bulding $image" echo "Bulding $image"
PUSH_IMAGE=1 bash -x ./build.sh skip $image PUSH_IMAGE=1 bash -x ./build.sh skip $image
echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG" echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG"
done done
- name: Creating old image input
env:
IMAGE_TAG: ${{ github.sha }}
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
tag: ${image_array[1]}
EOF
done
- name: Deploying to kubernetes
env:
IMAGE_TAG: ${{ github.sha }}
run: |
# #
# Deploying image to environment. # Deploying image to environment.
# #
cd ../scripts/helm/ cd scripts/helmcharts/
sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml
sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml ## Update secerts
sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
for image in $(cat ../../backend/images_to_build.txt); sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
## Update images
for image in $(cat /tmp/images_to_build.txt);
do do
sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml sed -i "/${image}/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
# Deploy command
bash kube-install.sh --app $image
done done
# Deploy command
helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
# - name: Debug Job # - name: Debug Job
# if: ${{ failure() }} # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3 # uses: mxschmitt/action-tmate@v3
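
Note that these per-service deploys pass --set skipMigration=true --no-hooks, so Helm hooks and DB migrations are skipped; only the full staging deploy earlier in this compare forces migrations (--set forceMigration=true together with --atomic). To see what a given invocation would apply without touching the cluster, one option is to render it locally; a sketch assuming it is run from scripts/helmcharts/ with the generated override in place:

# Render the manifests instead of installing them.
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks
# With the third-party helm-diff plugin installed, compare against the live release instead:
# helm diff upgrade openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks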

api/.dockerignore Normal file
View file

@ -0,0 +1,6 @@
# ignore .git and .cache folders
.git
.cache
**/build.sh
**/build_*.sh
**/*deploy.sh

View file

@ -1,20 +1,7 @@
FROM python:3.9.10-slim FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>" LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>" LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env
ENV APP_NAME chalice ENV APP_NAME chalice
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/* && \
cd sourcemap-reader && \
npm install
# Add Tini # Add Tini
# Startup daemon # Startup daemon
ENV TINI_VERSION v0.19.0 ENV TINI_VERSION v0.19.0
@ -22,5 +9,23 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg} ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini RUN chmod +x /tini
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/*
WORKDIR /work_tmp
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
COPY sourcemap-reader/*.json /work_tmp/
RUN cd /work_tmp && npm install
WORKDIR /work
COPY . .
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
ENTRYPOINT ["/tini", "--"] ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh CMD ./entrypoint.sh
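
The Dockerfile reordering above is a layer-caching change: requirements.txt and the sourcemap-reader package manifests are copied and installed before the final COPY . ., so an application-code change no longer invalidates the pip/npm layers. Roughly, assuming a hypothetical edit to a chalicelib module:

# First build populates the layer cache (pip install and npm install actually run).
docker build -t chalice:dev .
# A code-only change...
touch chalicelib/some_module.py
# ...rebuilds only the layers from COPY . . onwards; the dependency layers are reused from cache.
docker build -t chalice:dev .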

View file

@ -1,13 +1,9 @@
FROM python:3.9.10-slim FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>" LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>" LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENV pg_minconn 2
ENV APP_NAME alerts ENV APP_NAME alerts
ENV pg_minconn 2
ENV pg_maxconn 10
# Add Tini # Add Tini
# Startup daemon # Startup daemon
ENV TINI_VERSION v0.19.0 ENV TINI_VERSION v0.19.0
@ -15,5 +11,13 @@ ARG envarg
ENV ENTERPRISE_BUILD ${envarg} ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini RUN chmod +x /tini
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install -r /work_tmp/requirements.txt
WORKDIR /work
COPY . .
RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENTRYPOINT ["/tini", "--"] ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh CMD ./entrypoint.sh

View file

@ -1,4 +1,4 @@
FROM python:3.9.10-slim FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>" LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work WORKDIR /work
COPY . . COPY . .

View file

@ -19,10 +19,14 @@ class JWTAuth(HTTPBearer):
if not credentials.scheme == "Bearer": if not credentials.scheme == "Bearer":
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1),
tenant_id=jwt_payload.get("tenantId", -1),
jwt_iat=jwt_payload.get("iat", 100),
jwt_aud=jwt_payload.get("aud", ""))
if jwt_payload is None \ if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], or not auth_exists:
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
print("JWTAuth: Token issue") print("JWTAuth: Token issue")
if jwt_payload is not None: if jwt_payload is not None:
print(jwt_payload) print(jwt_payload)
@ -34,21 +38,19 @@ class JWTAuth(HTTPBearer):
print("JWTAuth: iat is None") print("JWTAuth: iat is None")
if jwt_payload is not None and jwt_payload.get("aud") is None: if jwt_payload is not None and jwt_payload.get("aud") is None:
print("JWTAuth: aud is None") print("JWTAuth: aud is None")
if jwt_payload is not None and \ if jwt_payload is not None and not auth_exists:
not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
print("JWTAuth: not users.auth_exists") print("JWTAuth: not users.auth_exists")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"]) user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
if user is None: if user is None:
print("JWTAuth: User not found.") print("JWTAuth: User not found.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
jwt_payload["authorizer_identity"] = "jwt" jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload) print(jwt_payload)
request.state.authorizer_identity = "jwt" request.state.authorizer_identity = "jwt"
request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"], request.state.currentContext = CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
user_id=jwt_payload["userId"], user_id=jwt_payload.get("userId", -1),
email=user["email"]) email=user["email"])
return request.state.currentContext return request.state.currentContext

View file

@ -12,9 +12,9 @@ envarg="default-foss"
check_prereq() { check_prereq() {
which docker || { which docker || {
echo "Docker not installed, please install docker." echo "Docker not installed, please install docker."
exit=1 exit 1
} }
[[ exit -eq 1 ]] && exit 1 return
} }
function build_api(){ function build_api(){
@ -32,9 +32,11 @@ function build_api(){
docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1}
docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest
docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest
} }
echo "api docker build completed"
} }
check_prereq check_prereq
build_api $1 build_api $1
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 echo build_complete
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1

View file

@ -27,7 +27,7 @@ function make_submodule() {
mkdir -p ./alerts/chalicelib/ mkdir -p ./alerts/chalicelib/
cp -R ./chalicelib/__init__.py ./alerts/chalicelib/ cp -R ./chalicelib/__init__.py ./alerts/chalicelib/
mkdir -p ./alerts/chalicelib/core/ mkdir -p ./alerts/chalicelib/core/
cp -R ./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,roles,assist,events_ios,sessions_mobs,errors,dashboard,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/ cp -R ./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,roles,assist,events_ios,sessions_mobs,errors,metrics,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/
mkdir -p ./alerts/chalicelib/utils/ mkdir -p ./alerts/chalicelib/utils/
cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/ cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/
# -- end of generated part # -- end of generated part
@ -64,7 +64,8 @@ function build_api(){
docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest
docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest
} }
echo "completed alerts build"
} }
check_prereq check_prereq
build_api $1 build_api $1

View file

@ -99,10 +99,10 @@ def Build(a):
j_s = True j_s = True
if a["seriesId"] is not None: if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id" a["filter"]["sort"] = "session_id"
a["filter"]["order"] = "DESC" a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["startDate"] = -1 a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now() a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part= sessions.search_query_parts( full_args, query_part = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value subQ = f"""SELECT COUNT(session_id) AS value

View file

@ -1,6 +1,7 @@
import requests import requests
from decouple import config from decouple import config
import schemas
from chalicelib.core import projects from chalicelib.core import projects
SESSION_PROJECTION_COLS = """s.project_id, SESSION_PROJECTION_COLS = """s.project_id,
@ -19,22 +20,40 @@ SESSION_PROJECTION_COLS = """s.project_id,
""" """
def get_live_sessions_ws(project_id, user_id=None): def get_live_sessions_ws_user_id(project_id, user_id):
data = {
"filter": {"userId": user_id} if user_id else {}
}
return __get_live_sessions_ws(project_id=project_id, data=data)
def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSchema):
data = {
"filter": {},
"pagination": {"limit": body.limit, "page": body.page},
"sort": {"key": body.sort, "order": body.order}
}
for f in body.filters:
if f.type == schemas.LiveFilterType.metadata:
data["filter"][f.source] = f.value
else:
data["filter"][f.type.value] = f.value
return __get_live_sessions_ws(project_id=project_id, data=data)
def __get_live_sessions_ws(project_id, data):
project_key = projects.get_project_key(project_id) project_key = projects.get_project_key(project_id)
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
try: try:
connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params, connected_peers = requests.post(config("assist") % config("S3_KEY") + f"/{project_key}", json=data,
timeout=config("assistTimeout", cast=int, default=5)) timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200: if connected_peers.status_code != 200:
print("!! issue with the peer-server") print("!! issue with the peer-server")
print(connected_peers.text) print(connected_peers.text)
return [] return {"total": 0, "sessions": []}
live_peers = connected_peers.json().get("data", []) live_peers = connected_peers.json().get("data", [])
except requests.exceptions.Timeout: except requests.exceptions.Timeout:
print("Timeout getting Assist response") print("Timeout getting Assist response")
live_peers = [] live_peers = {"total": 0, "sessions": []}
except Exception as e: except Exception as e:
print("issue getting Live-Assist response") print("issue getting Live-Assist response")
print(str(e)) print(str(e))
@ -43,34 +62,55 @@ def get_live_sessions_ws(project_id, user_id=None):
print(connected_peers.text) print(connected_peers.text)
except: except:
print("couldn't get response") print("couldn't get response")
live_peers = [] live_peers = {"total": 0, "sessions": []}
_live_peers = live_peers
for s in live_peers: if "sessions" in live_peers:
_live_peers = live_peers["sessions"]
for s in _live_peers:
s["live"] = True s["live"] = True
s["projectId"] = project_id s["projectId"] = project_id
live_peers = sorted(live_peers, key=lambda l: l.get("timestamp", 0), reverse=True)
return live_peers return live_peers
def get_live_session_by_id(project_id, session_id): def get_live_session_by_id(project_id, session_id):
all_live = get_live_sessions_ws(project_id) project_key = projects.get_project_key(project_id)
for l in all_live: try:
if str(l.get("sessionID")) == str(session_id): connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}",
return l timeout=config("assistTimeout", cast=int, default=5))
return None if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data")
if connected_peers is None:
return None
connected_peers["live"] = True
except requests.exceptions.Timeout:
print("Timeout getting Assist response")
return None
except Exception as e:
print("issue getting Assist response")
print(str(e))
print("expected JSON, received:")
try:
print(connected_peers.text)
except:
print("couldn't get response")
return None
return connected_peers
def is_live(project_id, session_id, project_key=None): def is_live(project_id, session_id, project_key=None):
if project_key is None: if project_key is None:
project_key = projects.get_project_key(project_id) project_key = projects.get_project_key(project_id)
try: try:
connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}", connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5)) timeout=config("assistTimeout", cast=int, default=5))
if connected_peers.status_code != 200: if connected_peers.status_code != 200:
print("!! issue with the peer-server") print("!! issue with the peer-server")
print(connected_peers.text) print(connected_peers.text)
return False return False
connected_peers = connected_peers.json().get("data", []) connected_peers = connected_peers.json().get("data")
except requests.exceptions.Timeout: except requests.exceptions.Timeout:
print("Timeout getting Assist response") print("Timeout getting Assist response")
return False return False
@ -83,7 +123,35 @@ def is_live(project_id, session_id, project_key=None):
except: except:
print("couldn't get response") print("couldn't get response")
return False return False
return str(session_id) in connected_peers return str(session_id) == connected_peers
def autocomplete(project_id, q: str, key: str = None):
project_key = projects.get_project_key(project_id)
params = {"q": q}
if key:
params["key"] = key
try:
results = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print("!! issue with the peer-server")
print(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e:
print("issue getting Assist response")
print(str(e))
print("expected JSON, received:")
try:
print(results.text)
except:
print("couldn't get response")
return {"errors": ["Something went wrong wile calling assist"]}
return {"data": results}
def get_ice_servers(): def get_ice_servers():
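
For reference, get_live_sessions_ws now POSTs the whole search payload (filters, pagination, sort) to the assist/peer server instead of passing a userId query parameter, and expects {"data": {"total": ..., "sessions": [...]}} back. A rough curl equivalent of that request, with a placeholder URL and example filter values since the real endpoint is assembled from config("assist") and the project key:

# Placeholder host/path; the actual endpoint comes from the assist config template plus the project key.
curl -s -X POST "http://assist.internal.example/PROJECT_KEY" \
  -H 'Content-Type: application/json' \
  --max-time 5 \
  -d '{"filter": {"userId": "jane@acme.com"}, "pagination": {"limit": 10, "page": 1}, "sort": {"key": "timestamp", "order": "desc"}}'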

View file

@ -2,7 +2,7 @@ import json
from typing import Union from typing import Union
import schemas import schemas
from chalicelib.core import sessions from chalicelib.core import sessions, funnels, errors, issues
from chalicelib.utils import helper, pg_client from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
@ -42,7 +42,66 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return results return results
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
return data.metric_type == schemas.MetricType.funnel
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
if len(data.series) == 0:
return {
"stages": [],
"totalDropDueToIssues": 0
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __is_errors_list(data):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.errors
def __get_errors_list(project_id, user_id, data):
if len(data.series) == 0:
return {
"total": 0,
"errors": []
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data.series[0].filter.page = data.page
data.series[0].filter.limit = data.limit
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_sessions_list(data):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.sessions
def __get_sessions_list(project_id, user_id, data):
if len(data.series) == 0:
print("empty series")
return {
"total": 0,
"sessions": []
}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data.series[0].filter.page = data.page
data.series[0].filter.limit = data.limit
return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
if __is_funnel_chart(data):
return __get_funnel_chart(project_id=project_id, data=data)
elif __is_errors_list(data):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
elif __is_sessions_list(data):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
series_charts = __try_live(project_id=project_id, data=data) series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts return series_charts
@ -75,15 +134,22 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa
if metric is None: if metric is None:
return None return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
series_charts = __try_live(project_id=project_id, data=metric)
if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: return merged_live(project_id=project_id, data=metric, user_id=user_id)
return series_charts # if __is_funnel_chart(metric):
results = [{}] * len(series_charts[0]) # return __get_funnel_chart(project_id=project_id, data=metric)
for i in range(len(results)): # elif __is_errors_list(metric):
for j, series_chart in enumerate(series_charts): # return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], #
metric.series[j].name: series_chart[i]["count"]} # series_charts = __try_live(project_id=project_id, data=metric)
return results # if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
# return series_charts
# results = [{}] * len(series_charts[0])
# for i in range(len(results)):
# for j, series_chart in enumerate(series_charts):
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
# metric.series[j].name: series_chart[i]["count"]}
# return results
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
@ -105,6 +171,38 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
return results return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
return {"seriesId": s.series_id, "seriesName": s.name,
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
return {"seriesId": s.series_id, "seriesName": s.name,
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
results = [] results = []
if data.series is None: if data.series is None:
@ -130,12 +228,16 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa
_data[f"filter_{i}"] = s.filter.json() _data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series) series_len = len(data.series)
data.series = None data.series = None
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} params = {"user_id": user_id, "project_id": project_id,
"default_config": json.dumps(data.config.dict()),
**data.dict(), **_data}
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value, metric_format) view_type, metric_type, metric_of, metric_value,
metric_format, default_config)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, %(metric_format)s) %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s)
RETURNING *) RETURNING *)
INSERT INSERT
INTO metric_series(metric_id, index, name, filter) INTO metric_series(metric_id, index, name, filter)
@ -396,3 +498,41 @@ def change_state(project_id, metric_id, user_id, status):
{"metric_id": metric_id, "status": status, "user_id": user_id}) {"metric_id": metric_id, "status": status, "user_id": user_id})
) )
return get(metric_id=metric_id, project_id=project_id, user_id=user_id) return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CustomMetricSessionsPayloadSchema
# , range_value=None, start_date=None, end_date=None
):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
issue = None
for i in issues_list:
if i.get("issueId", "") == issue_id:
issue = i
break
if issue is None:
issue = issues.get(project_id=project_id, issue_id=issue_id)
if issue is not None:
issue = {**issue,
"affectedSessions": 0,
"affectedUsers": 0,
"conversionImpact": 0,
"lostConversions": 0,
"unaffectedSessions": 0}
return {"seriesId": s.series_id, "seriesName": s.name,
"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}

View file

@ -6,8 +6,9 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.TimeUTC import TimeUTC
# category name should be lower cased
CATEGORY_DESCRIPTION = { CATEGORY_DESCRIPTION = {
'overview': 'High-level metrics and web vitals.', 'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
'custom': 'Previously created custom metrics by me and my team.', 'custom': 'Previously created custom metrics by me and my team.',
'errors': 'Keep a closer eye on errors and track their type, origin and domain.', 'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
'performance': 'Optimize your apps performance by tracking slow domains, page response times, memory consumption, CPU usage and more.', 'performance': 'Optimize your apps performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
@ -33,17 +34,20 @@ def get_templates(project_id, user_id):
cur.execute(pg_query) cur.execute(pg_query)
rows = cur.fetchall() rows = cur.fetchall()
for r in rows: for r in rows:
r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "") r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
for w in r["widgets"]: for w in r["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.list_to_camel_case(rows) return helper.list_to_camel_case(rows)
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned) pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s) VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
RETURNING *""" RETURNING *"""
params = {"userId": user_id, "projectId": project_id, **data.dict()} params = {"userId": user_id, "projectId": project_id, **data.dict()}
if data.metrics is not None and len(data.metrics) > 0: if data.metrics is not None and len(data.metrics) > 0:
@ -134,7 +138,8 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
row = cur.fetchone() row = cur.fetchone()
offset = row["count"] offset = row["count"]
pg_query = f"""UPDATE dashboards pg_query = f"""UPDATE dashboards
SET name = %(name)s SET name = %(name)s,
description= %(description)s
{", is_public = %(is_public)s" if data.is_public is not None else ""} {", is_public = %(is_public)s" if data.is_public is not None else ""}
{", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""} {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
WHERE dashboards.project_id = %(projectId)s WHERE dashboards.project_id = %(projectId)s

View file

@ -425,10 +425,9 @@ def __get_sort_key(key):
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": { empty_response = {'total': 0,
'total': 0, 'errors': []
'errors': [] }
}}
platform = None platform = None
for f in data.filters: for f in data.filters:
@ -437,6 +436,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"] "pe.project_id=%(project_id)s"]
# To ignore Script error
pg_sub_query.append("pe.message!='Script error.'")
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None) pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'") # pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id") pg_sub_query_chart.append("errors.error_id =details.error_id")
@ -463,7 +464,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
sort = __get_sort_key('datetime') sort = __get_sort_key('datetime')
if data.sort is not None: if data.sort is not None:
sort = __get_sort_key(data.sort) sort = __get_sort_key(data.sort)
order = "DESC" order = schemas.SortOrderType.desc
if data.order is not None: if data.order is not None:
order = data.order order = data.order
extra_join = "" extra_join = ""
@ -544,7 +545,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
rows = cur.fetchall() rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"] total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows: if flows:
return {"data": {"count": total}} return {"count": total}
if total == 0: if total == 0:
rows = [] rows = []
@ -592,10 +593,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows) offset -= len(rows)
return { return {
"data": { 'total': total - offset,
'total': total - offset, 'errors': helper.list_to_camel_case(rows)
'errors': helper.list_to_camel_case(rows)
}
} }

View file

@ -28,8 +28,8 @@ def __merge_cells(rows, start, count, replacement):
return rows return rows
def __get_grouped_clickrage(rows, session_id): def __get_grouped_clickrage(rows, session_id, project_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage") click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
if len(click_rage_issues) == 0: if len(click_rage_issues) == 0:
return rows return rows
@ -63,7 +63,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
) )
rows = cur.fetchall() rows = cur.fetchall()
if group_clickrage: if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id) rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
cur.execute(cur.mogrify(""" cur.execute(cur.mogrify("""
SELECT SELECT
@ -435,7 +435,15 @@ def __get_autocomplete_table(value, project_id):
query = cur.mogrify(" UNION ".join(sub_queries) + ";", query = cur.mogrify(" UNION ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value), {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}) "svalue": helper.string_to_sql_like("^" + value)})
cur.execute(query) try:
cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- VALUE -----------")
print(value)
print("--------------------")
raise err
results = helper.list_to_camel_case(cur.fetchall()) results = helper.list_to_camel_case(cur.fetchall())
return results return results
@ -464,14 +472,13 @@ def search(text, event_type, project_id, source, key):
return {"data": rows} return {"data": rows}
def get_errors_by_session_id(session_id): def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\ cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id) FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
er.session_id = %(session_id)s ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
ORDER BY timestamp;""", {"session_id": session_id}))
errors = cur.fetchall() errors = cur.fetchall()
for e in errors: for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"]) e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])

View file

@ -251,6 +251,26 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu
"totalDropDueToIssues": total_drop_due_to_issues}} "totalDropDueToIssues": total_drop_due_to_issues}}
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
return {"stages": [], "totalDropDueToIssues": 0}
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# TODO: check if this is correct
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
if len(insights) == 0:
total_drop_due_to_issues = 0
else:
total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None: if f is None:
@ -280,6 +300,19 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe
last_stage=len(data.events)))} last_stage=len(data.events)))}
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) < 0:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute( cur.execute(

View file

@ -41,19 +41,23 @@ def get(project_id, issue_id):
) )
cur.execute(query=query) cur.execute(query=query)
data = cur.fetchone() data = cur.fetchone()
if data is not None:
data["title"] = helper.get_issue_title(data["type"])
return helper.dict_to_camel_case(data) return helper.dict_to_camel_case(data)
def get_by_session_id(session_id, issue_type=None): def get_by_session_id(session_id, project_id, issue_type=None):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute( cur.execute(
cur.mogrify(f"""\ cur.mogrify(f"""\
SELECT * SELECT *
FROM events_common.issues FROM events_common.issues
INNER JOIN public.issues USING (issue_id) INNER JOIN public.issues USING (issue_id)
WHERE session_id = %(session_id)s {"AND type = %(type)s" if issue_type is not None else ""} WHERE session_id = %(session_id)s
AND project_id= %(project_id)s
{"AND type = %(type)s" if issue_type is not None else ""}
ORDER BY timestamp;""", ORDER BY timestamp;""",
{"session_id": session_id, "type": issue_type}) {"session_id": session_id, "project_id": project_id, "type": issue_type})
) )
return helper.list_to_camel_case(cur.fetchall()) return helper.list_to_camel_case(cur.fetchall())

View file

@ -1,21 +1,9 @@
from chalicelib.utils import pg_client EDITION = 'foss'
def get_status(tenant_id=None): def get_status(tenant_id=None):
with pg_client.PostgresClient() as cur:
cur.execute("SELECT * FROM public.tenants;")
r = cur.fetchone()
return { return {
"hasActivePlan": True, "hasActivePlan": True,
"current": { "edition": EDITION,
"edition": r.get("edition", "").upper(), "expirationDate": -1
"versionNumber": r.get("version_number", ""),
"license": "",
"expirationDate": -1
},
"count": {
"teamMember": r.get("t_users"),
"projects": r.get("t_projects"),
"capturedSessions": r.get("t_sessions")
}
} }

View file

@ -1,4 +1,5 @@
from elasticsearch import Elasticsearch, RequestsHttpConnection # from elasticsearch import Elasticsearch, RequestsHttpConnection
from elasticsearch import Elasticsearch
from chalicelib.core import log_tools from chalicelib.core import log_tools
import base64 import base64
import logging import logging
@ -58,20 +59,21 @@ def add_edit(tenant_id, project_id, data):
def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15): def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
scheme = "http" if host.startswith("http") else "https"
host = host.replace("http://", "").replace("https://", "") host = host.replace("http://", "").replace("https://", "")
try: try:
args = { args = {
"hosts": [{"host": host, "port": port}], "hosts": [{"host": host, "port": port, "scheme": scheme}],
"use_ssl": use_ssl,
"verify_certs": False, "verify_certs": False,
"ca_certs": False, # "ca_certs": False,
"connection_class": RequestsHttpConnection, # "connection_class": RequestsHttpConnection,
"timeout": timeout "request_timeout": timeout,
"api_key": (api_key_id, api_key)
} }
if api_key_id is not None and len(api_key_id) > 0: # if api_key_id is not None and len(api_key_id) > 0:
# args["http_auth"] = (username, password) # # args["http_auth"] = (username, password)
token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8") # token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8")
args["headers"] = {"Authorization": token} # args["headers"] = {"Authorization": token}
es = Elasticsearch( es = Elasticsearch(
**args **args
) )

View file

@ -967,7 +967,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return row return row
@ -1069,11 +1069,11 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
pg_sub_query.append("pages.speed_index>0") pg_sub_query.append("pages.speed_index>0")
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS avg pg_query = f"""SELECT sessions.user_country, AVG(pages.speed_index) AS value
FROM events.pages INNER JOIN public.sessions USING (session_id) FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)} WHERE {" AND ".join(pg_sub_query)}
GROUP BY sessions.user_country GROUP BY sessions.user_country
ORDER BY avg,sessions.user_country;""" ORDER BY value, sessions.user_country;"""
params = {"project_id": project_id, params = {"project_id": project_id,
"startTimestamp": startTimestamp, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)} "endTimestamp": endTimestamp, **__get_constraint_values(args)}
@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
avg = cur.fetchone()["avg"] avg = cur.fetchone()["avg"]
else: else:
avg = 0 avg = 0
return {"avg": avg, "chart": helper.list_to_camel_case(rows)} return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1126,7 +1126,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
WHERE {" AND ".join(pg_sub_query)};""" WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"] avg = cur.fetchone()["avg"]
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} result = {"value": avg, "chart": rows}
helper.__time_value(result)
return result
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1169,7 +1171,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
else: else:
quantiles = [0 for i in range(len(quantiles_keys))] quantiles = [0 for i in range(len(quantiles_keys))]
result = { result = {
"avg": avg, "value": avg,
"total": sum(r["count"] for r in rows), "total": sum(r["count"] for r in rows),
"chart": [], "chart": [],
"percentiles": [{ "percentiles": [{
@ -1177,7 +1179,8 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
"responseTime": int(quantiles[i]) "responseTime": int(quantiles[i])
} for i, v in enumerate(quantiles_keys) } for i, v in enumerate(quantiles_keys)
], ],
"extremeValues": [{"count": 0}] "extremeValues": [{"count": 0}],
"unit": schemas.TemplatePredefinedUnits.millisecond
} }
rows = helper.list_to_camel_case(rows) rows = helper.list_to_camel_case(rows)
_99 = result["percentiles"][-1]["responseTime"] _99 = result["percentiles"][-1]["responseTime"]
@ -1348,7 +1351,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return row return row
@ -1498,7 +1501,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_sub_query_chart.append("m_issues.type = 'crash'") pg_sub_query_chart.append("m_issues.type = 'crash'")
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp, pg_query = f"""SELECT generated_timestamp AS timestamp,
COUNT(sessions) AS count COUNT(sessions) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
SELECT sessions.session_id SELECT sessions.session_id
@ -1556,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
versions.append({v["version"]: v["count"] / (r["total"] / 100)}) versions.append({v["version"]: v["count"] / (r["total"] / 100)})
r["versions"] = versions r["versions"] = versions
return {"chart": rows, "browsers": browsers} return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count}
def __get_neutral(rows, add_All_if_empty=True): def __get_neutral(rows, add_All_if_empty=True):
@ -1719,7 +1722,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT pg_query = f"""SELECT
resources.url_host AS domain, resources.url_host AS domain,
AVG(resources.duration) AS avg AVG(resources.duration) AS value
FROM events.resources INNER JOIN sessions USING (session_id) FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)} WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.url_host GROUP BY resources.url_host
@ -1738,7 +1741,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
avg = cur.fetchone()["avg"] avg = cur.fetchone()["avg"]
else: else:
avg = 0 avg = 0
return {"avg": avg, "partition": rows} return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2241,7 +2244,7 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row) previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2300,7 +2303,7 @@ def __get_application_activity_avg_page_load_time(cur, project_id, startTimestam
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone() row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return row return row
@ -2316,7 +2319,7 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args) row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row) previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2369,7 +2372,7 @@ def __get_application_activity_avg_request_load_time(cur, project_id, startTimes
"endTimestamp": endTimestamp, **__get_constraint_values(args)})) "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
row = cur.fetchone() row = cur.fetchone()
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return row return row
@ -2385,7 +2388,7 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args) row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row) previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2442,7 +2445,7 @@ def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeU
row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args) row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args)
previous = helper.dict_to_camel_case(row) previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2512,7 +2515,7 @@ def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeU
if len(rows) > 0: if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0]) previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2645,7 +2648,7 @@ def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.no
previous = helper.dict_to_camel_case(row) previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(results)
return results return results
@ -2731,7 +2734,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows) row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
@ -2772,7 +2775,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows) row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
@ -2816,7 +2819,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows) row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
@ -2857,7 +2860,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows) row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
@ -2899,7 +2902,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
cur.execute(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall() rows = cur.fetchall()
row["chart"] = helper.list_to_camel_case(rows) row["chart"] = helper.list_to_camel_case(rows)
row["unit"] = schemas.TemplatePredefinedUnits.millisecond helper.__time_value(row)
return helper.dict_to_camel_case(row) return helper.dict_to_camel_case(row)
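Taken together, these hunks standardise the widget payloads: the old "avg"/"count" keys become "value", and every response carries a "unit", either set directly or filled in by helper.__time_value. Roughly, with invented numbers:

```python
# Invented numbers, shown only to illustrate the key rename and the added unit.
old_payload = {"avg": 2300.0,
               "chart": [{"timestamp": 1656578400000, "avg": 2300.0}]}

new_payload = {"value": 2.3,    # rescaled from milliseconds by helper.__time_value
               "unit": "s",     # whatever schemas.TemplatePredefinedUnits.second serializes to
               "chart": [{"timestamp": 1656578400000, "value": 2.3}]}
```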

View file

@@ -25,6 +25,22 @@ def get_all(tenant_id, user_id):
     return rows


+def get_all_count(tenant_id, user_id):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify("""\
+                SELECT COALESCE(COUNT(notifications.*),0) AS count
+                FROM public.notifications
+                     LEFT JOIN (SELECT notification_id
+                                FROM public.user_viewed_notifications
+                                WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
+                WHERE (notifications.user_id IS NULL OR notifications.user_id =%(user_id)s) AND user_viewed_notifications.notification_id IS NULL;""",
+                        {"user_id": user_id})
+        )
+        row = cur.fetchone()
+    return row
+
+
 def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
     if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
         return False

View file

@@ -1,8 +1,10 @@
 from chalicelib.utils import helper, pg_client
+from decouple import config


-def get_by_session_id(session_id, project_id):
+def get_by_session_id(session_id, project_id, start_ts, duration):
     with pg_client.PostgresClient() as cur:
+        delta = config("events_ts_delta", cast=int, default=5 * 60) * 1000
         ch_query = """\
                 SELECT
                   timestamp AS datetime,
@@ -16,8 +18,13 @@ def get_by_session_id(session_id, project_id):
                   success,
                   COALESCE(status, CASE WHEN success THEN 200 END) AS status
                 FROM events.resources INNER JOIN sessions USING (session_id)
-                WHERE session_id = %(session_id)s AND project_id= %(project_id)s;"""
-        params = {"session_id": session_id, "project_id": project_id}
+                WHERE session_id = %(session_id)s
+                  AND project_id= %(project_id)s
+                  AND sessions.start_ts=%(start_ts)s
+                  AND resources.timestamp>=%(res_start_ts)s
+                  AND resources.timestamp<=%(res_end_ts)s;"""
+        params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
+                  "res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
         cur.execute(cur.mogrify(ch_query, params))
         rows = cur.fetchall()
     return helper.list_to_camel_case(rows)
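A quick worked example of the new fetch window (numbers invented; the 5-minute padding is the `events_ts_delta` default above):

```python
# Illustrative values for a single session.
start_ts = 1_656_578_400_000      # session start, epoch ms
duration = 90_000                 # a 90-second session
delta = 5 * 60 * 1000             # events_ts_delta default: 5 minutes in ms

res_start_ts = start_ts - delta               # 1_656_578_100_000
res_end_ts = start_ts + duration + delta      # 1_656_578_790_000

# Only resources of this exact session (same session_id, project_id and start_ts)
# whose timestamp falls inside [res_start_ts, res_end_ts] are returned to the player.
print((res_end_ts - res_start_ts) / 1000)     # 690.0 -> total fetch window, in seconds
```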

View file

@@ -85,7 +85,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
         else:
             data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
                                                          group_clickrage=True)
-            all_errors = events.get_errors_by_session_id(session_id=session_id)
+            all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
             data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
             # to keep only the first stack
             data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors if
@@ -94,17 +94,21 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
             data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                      session_id=session_id)
             data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
-            data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id)
+            data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
+                                                            start_ts=data["startTs"],
+                                                            duration=data["duration"])
             data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
-            data['issues'] = issues.get_by_session_id(session_id=session_id)
+            data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
             data['live'] = live and assist.is_live(project_id=project_id,
                                                    session_id=session_id,
                                                    project_key=data["projectKey"])

         data["inDB"] = True
         return data
-    else:
+    elif live:
         return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
+    else:
+        return None


 def __get_sql_operator(op: schemas.SearchEventOperator):
@ -201,12 +205,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
elif data.group_by_user: elif data.group_by_user:
g_sort = "count(full_sessions)" g_sort = "count(full_sessions)"
if data.order is None: if data.order is None:
data.order = "DESC" data.order = schemas.SortOrderType.desc
else: else:
data.order = data.order.upper() data.order = data.order.upper()
if data.sort is not None and data.sort != 'sessionsCount': if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
else: else:
sort = 'start_ts' sort = 'start_ts'
@ -230,7 +234,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
full_args) full_args)
else: else:
if data.order is None: if data.order is None:
data.order = "DESC" data.order = schemas.SortOrderType.desc
sort = 'session_id' sort = 'session_id'
if data.sort is not None and data.sort != "session_id": if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@ -254,9 +258,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
cur.execute(main_query) cur.execute(main_query)
except Exception as err: except Exception as err:
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
print(main_query) print(main_query.decode('UTF-8'))
print("--------- PAYLOAD -----------") print("--------- PAYLOAD -----------")
print(data.dict()) print(data.json())
print("--------------------") print("--------------------")
raise err raise err
if errors_only: if errors_only:
@@ -1199,7 +1203,7 @@ def get_session_ids_by_user_ids(project_id, user_ids):

 def delete_sessions_by_session_ids(session_ids):
-    with pg_client.PostgresClient(long_query=True) as cur:
+    with pg_client.PostgresClient(unlimited_query=True) as cur:
         query = cur.mogrify(
             """\
             DELETE FROM public.sessions
@@ -1213,7 +1217,7 @@ def delete_sessions_by_session_ids(session_ids):

 def delete_sessions_by_user_ids(project_id, user_ids):
-    with pg_client.PostgresClient(long_query=True) as cur:
+    with pg_client.PostgresClient(unlimited_query=True) as cur:
         query = cur.mogrify(
             """\
             DELETE FROM public.sessions
@@ -1227,6 +1231,6 @@ def delete_sessions_by_user_ids(project_id, user_ids):

 def count_all():
-    with pg_client.PostgresClient(long_query=True) as cur:
+    with pg_client.PostgresClient(unlimited_query=True) as cur:
         row = cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
     return row.get("count", 0)

View file

@@ -5,14 +5,23 @@ from chalicelib.utils.s3 import client


 def get_web(sessionId):
-    return client.generate_presigned_url(
-        'get_object',
-        Params={
-            'Bucket': config("sessions_bucket"),
-            'Key': str(sessionId)
-        },
-        ExpiresIn=100000
-    )
+    return [
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(sessionId)
+            },
+            ExpiresIn=100000
+        ),
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(sessionId) + "e"
+            },
+            ExpiresIn=100000
+        )]


 def get_ios(sessionId):
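Callers such as data['mobsUrl'] in sessions.py above now receive a list of two presigned links instead of a single string: the original key plus a second object keyed str(sessionId) + "e". A small sketch of the new shape (session id and URLs invented):

```python
from chalicelib.core import sessions_mobs

urls = sessions_mobs.get_web(sessionId=7438210001)  # invented session id
# e.g. ["https://minio.local/mobs/7438210001?X-Amz-...",
#       "https://minio.local/mobs/7438210001e?X-Amz-..."]
for url in urls:
    print(url)  # each link expires after ExpiresIn=100000 seconds (~27.8 hours)
```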

View file

@ -24,7 +24,6 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
""" """
Add minimal timestamp Add minimal timestamp
@ -293,7 +292,6 @@ def pearson_corr(x: list, y: list):
return r, confidence, False return r, confidence, False
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage): def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
""" """
Returns two lists with binary values 0/1: Returns two lists with binary values 0/1:
@ -363,7 +361,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
return transitions, errors, all_errors, n_sess_affected return transitions, errors, all_errors, n_sess_affected
def get_affected_users_for_all_issues(rows, first_stage, last_stage): def get_affected_users_for_all_issues(rows, first_stage, last_stage):
""" """
@ -415,7 +412,6 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
def count_sessions(rows, n_stages): def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)} session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows): for ind, row in enumerate(rows):
@ -467,7 +463,6 @@ def get_stages(stages, rows):
return stages_list return stages_list
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False): def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
""" """
@ -544,7 +539,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues return n_critical_issues, issues_dict, total_drop_due_to_issues
def get_top_insights(filter_d, project_id): def get_top_insights(filter_d, project_id):
output = [] output = []
stages = filter_d.get("events", []) stages = filter_d.get("events", [])
@@ -582,9 +576,8 @@ def get_top_insights(filter_d, project_id):
     return stages_list, total_drop_due_to_issues


-
 def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
-    output = dict({'critical_issues_count': 0})
+    output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
     stages = filter_d.get("events", [])
     # The result of the multi-stage query
     rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)

View file

@@ -67,8 +67,8 @@ def create_step1(data: schemas.UserSignupSchema):
         }
         query = f"""\
                 WITH t AS (
-                    INSERT INTO public.tenants (name, version_number, edition)
-                        VALUES (%(organizationName)s, (SELECT openreplay_version()), 'fos')
+                    INSERT INTO public.tenants (name, version_number)
+                        VALUES (%(organizationName)s, (SELECT openreplay_version()))
                         RETURNING api_key
                 ),
                 u AS (
@@ -77,8 +77,8 @@ def create_step1(data: schemas.UserSignupSchema):
                         RETURNING user_id,email,role,name
                 ),
                 au AS (INSERT
-                    INTO public.basic_authentication (user_id, password, generated_password)
-                    VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE)
+                    INTO public.basic_authentication (user_id, password)
+                    VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)))
                 )
                 INSERT INTO public.projects (name, active)
                 VALUES (%(projectName)s, TRUE)

View file

@@ -1,13 +1,15 @@
 from chalicelib.utils import pg_client
 import requests
+from chalicelib.core import license


-def process_data(data, edition='fos'):
+def process_data(data):
     return {
-        'edition': edition,
+        'edition': license.EDITION,
         'tracking': data["opt_out"],
         'version': data["version_number"],
-        'user_id': data["user_id"],
+        'user_id': data["tenant_key"],
+        'tenant_key': data["tenant_key"],
         'owner_email': None if data["opt_out"] else data["email"],
         'organization_name': None if data["opt_out"] else data["name"],
         'users_count': data["t_users"],
@@ -27,7 +29,7 @@ def compute():
                     t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0),
                     t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0),
                     t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0)
-                RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out,
+                RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
                     (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);"""
         )
         data = cur.fetchone()
@@ -39,6 +41,7 @@ def new_client():
         cur.execute(
             f"""SELECT *,
                        (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email
-                FROM public.tenants;""")
+                FROM public.tenants
+                LIMIT 1;""")
         data = cur.fetchone()
         requests.post('https://api.openreplay.com/os/signup', json=process_data(data))

View file

@@ -1,7 +1,7 @@
 import schemas
 from chalicelib.utils import pg_client
 from chalicelib.utils import helper
-from chalicelib.core import users
+from chalicelib.core import users, license


 def get_by_tenant_id(tenant_id):
@@ -13,7 +13,7 @@ def get_by_tenant_id(tenant_id):
                        name,
                        api_key,
                        created_at,
-                       edition,
+                       '{license.EDITION}' AS edition,
                        version_number,
                        opt_out
                 FROM public.tenants
@@ -67,7 +67,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema):
     admin = users.get(user_id=user_id, tenant_id=tenant_id)
     if not admin["admin"] and not admin["superAdmin"]:
-        return {"error": "unauthorized"}
+        return {"errors": ["unauthorized, needs admin or owner"]}

     if data.name is None and data.opt_out is None:
         return {"errors": ["please provide 'name' or 'optOut' attribute for update"]}
     changes = {}

View file

@ -4,6 +4,7 @@ import secrets
from decouple import config from decouple import config
from fastapi import BackgroundTasks from fastapi import BackgroundTasks
import schemas
from chalicelib.core import authorizers, metadata, projects from chalicelib.core import authorizers, metadata, projects
from chalicelib.core import tenants, assist from chalicelib.core import tenants, assist
from chalicelib.utils import dev, email_helper from chalicelib.utils import dev, email_helper
@ -21,10 +22,10 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
query = cur.mogrify(f"""\ query = cur.mogrify(f"""\
WITH u AS (INSERT INTO public.users (email, role, name, data) WITH u AS (INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s) VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
RETURNING user_id,email,role,name,appearance RETURNING user_id,email,role,name
), ),
au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) au AS (INSERT INTO public.basic_authentication (user_id, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now())) VALUES ((SELECT user_id FROM u), %(invitation_token)s, timezone('utc'::text, now()))
RETURNING invitation_token RETURNING invitation_token
) )
SELECT u.user_id, SELECT u.user_id,
@ -32,7 +33,6 @@ def create_new_member(email, invitation_token, admin, name, owner=False):
u.email, u.email,
u.role, u.role,
u.name, u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -61,7 +61,6 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
email, email,
role, role,
name, name,
TRUE AS change_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""", (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
@ -73,8 +72,7 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
result = cur.fetchone() result = cur.fetchone()
query = cur.mogrify("""\ query = cur.mogrify("""\
UPDATE public.basic_authentication UPDATE public.basic_authentication
SET generated_password = TRUE, SET invitation_token = %(invitation_token)s,
invitation_token = %(invitation_token)s,
invited_at = timezone('utc'::text, now()), invited_at = timezone('utc'::text, now()),
change_pwd_expire_at = NULL, change_pwd_expire_at = NULL,
change_pwd_token = NULL change_pwd_token = NULL
@ -118,7 +116,7 @@ def reset_member(tenant_id, editor_id, user_id_to_update):
def update(tenant_id, user_id, changes): def update(tenant_id, user_id, changes):
AUTH_KEYS = ["password", "generatedPassword", "invitationToken", "invitedAt", "changePwdExpireAt", "changePwdToken"] AUTH_KEYS = ["password", "invitationToken", "invitedAt", "changePwdExpireAt", "changePwdToken"]
if len(changes.keys()) == 0: if len(changes.keys()) == 0:
return None return None
@ -132,11 +130,7 @@ def update(tenant_id, user_id, changes):
else: else:
sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s") sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s")
else: else:
if key == "appearance": sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
changes["appearance"] = json.dumps(changes[key])
else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
if len(sub_query_users) > 0: if len(sub_query_users) > 0:
@ -151,11 +145,9 @@ def update(tenant_id, user_id, changes):
users.email, users.email,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
users.appearance;""",
{"user_id": user_id, **changes}) {"user_id": user_id, **changes})
) )
if len(sub_query_bauth) > 0: if len(sub_query_bauth) > 0:
@ -170,11 +162,9 @@ def update(tenant_id, user_id, changes):
users.email, users.email,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
users.appearance;""",
{"user_id": user_id, **changes}) {"user_id": user_id, **changes})
) )
@ -244,16 +234,15 @@ def get(user_id, tenant_id):
cur.execute( cur.execute(
cur.mogrify( cur.mogrify(
f"""SELECT f"""SELECT
users.user_id AS id, users.user_id,
email, email,
role, role,
name, name,
basic_authentication.generated_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance, api_key,
api_key TRUE AS has_password
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE WHERE
users.user_id = %(userId)s users.user_id = %(userId)s
@ -262,7 +251,7 @@ def get(user_id, tenant_id):
{"userId": user_id}) {"userId": user_id})
) )
r = cur.fetchone() r = cur.fetchone()
return helper.dict_to_camel_case(r, ignore_keys=["appearance"]) return helper.dict_to_camel_case(r)
def generate_new_api_key(user_id): def generate_new_api_key(user_id):
@ -281,45 +270,39 @@ def generate_new_api_key(user_id):
return helper.dict_to_camel_case(r) return helper.dict_to_camel_case(r)
def edit(user_id_to_update, tenant_id, changes, editor_id): def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id):
ALLOW_EDIT = ["name", "email", "admin", "appearance"]
user = get(user_id=user_id_to_update, tenant_id=tenant_id) user = get(user_id=user_id_to_update, tenant_id=tenant_id)
if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]: if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]:
admin = get(tenant_id=tenant_id, user_id=editor_id) admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]: if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]} return {"errors": ["unauthorized"]}
_changes = {}
if editor_id == user_id_to_update: if editor_id == user_id_to_update:
if user["superAdmin"]: if changes.admin is not None:
changes.pop("admin") if user["superAdmin"]:
elif user["admin"] != changes["admin"]: changes.admin = None
return {"errors": ["cannot change your own role"]} elif changes.admin != user["admin"]:
return {"errors": ["cannot change your own role"]}
keys = list(changes.keys()) if changes.email is not None and changes.email != user["email"]:
for k in keys: if email_exists(changes.email):
if k not in ALLOW_EDIT or changes[k] is None: return {"errors": ["email already exists."]}
changes.pop(k) if get_deleted_user_by_email(changes.email) is not None:
keys = list(changes.keys()) return {"errors": ["email previously deleted."]}
_changes["email"] = changes.email
if len(keys) > 0: if changes.name is not None and len(changes.name) > 0:
if "email" in keys and changes["email"] != user["email"]: _changes["name"] = changes.name
if email_exists(changes["email"]):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes["email"]) is not None:
return {"errors": ["email previously deleted."]}
if "admin" in keys:
changes["role"] = "admin" if changes.pop("admin") else "member"
if len(changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes)
return {"data": updated_user} if changes.admin is not None:
_changes["role"] = "admin" if changes.admin else "member"
if len(_changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes)
return {"data": updated_user}
return {"data": user} return {"data": user}
def edit_appearance(user_id, tenant_id, changes):
updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
return {"data": updated_user}
def get_by_email_only(email): def get_by_email_only(email):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
cur.execute( cur.execute(
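edit() above now takes a typed payload instead of a raw dict. The schemas.EditUserSchema model itself is not part of this diff; a plausible sketch, assuming pydantic 1.9 as pinned in requirements.txt, would be:

```python
# Hypothetical reconstruction of schemas.EditUserSchema (not shown in this changeset):
# every field optional, so a request may carry any subset of them.
from typing import Optional
from pydantic import BaseModel, EmailStr

class EditUserSchema(BaseModel):
    name: Optional[str] = None
    email: Optional[EmailStr] = None
    admin: Optional[bool] = None

changes = EditUserSchema(name="Jane Doe", admin=True)
print(changes.email is None)  # True, and edit() leaves fields that stay None untouched
```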
@ -329,8 +312,7 @@ def get_by_email_only(email):
1 AS tenant_id, 1 AS tenant_id,
users.email, users.email,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
@ -353,8 +335,7 @@ def get_by_email_reset(email, reset_token):
1 AS tenant_id, 1 AS tenant_id,
users.email, users.email,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
@ -377,7 +358,7 @@ def get_members(tenant_id):
users.email, users.email,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password, users.created_at,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
@ -393,6 +374,7 @@ def get_members(tenant_id):
if len(r): if len(r):
r = helper.list_to_camel_case(r) r = helper.list_to_camel_case(r)
for u in r: for u in r:
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
if u["invitationToken"]: if u["invitationToken"]:
u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) u["invitationLink"] = __get_invitation_link(u.pop("invitationToken"))
else: else:
@ -440,7 +422,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
auth = authenticate(email, old_password, for_change_password=True) auth = authenticate(email, old_password, for_change_password=True)
if auth is None: if auth is None:
return {"errors": ["wrong password"]} return {"errors": ["wrong password"]}
changes = {"password": new_password, "generatedPassword": False} changes = {"password": new_password}
user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password) r = authenticate(user['email'], new_password)
tenant_id = r.pop("tenantId") tenant_id = r.pop("tenantId")
@ -466,7 +448,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
def set_password_invitation(user_id, new_password): def set_password_invitation(user_id, new_password):
changes = {"password": new_password, "generatedPassword": False, changes = {"password": new_password,
"invitationToken": None, "invitedAt": None, "invitationToken": None, "invitedAt": None,
"changePwdExpireAt": None, "changePwdToken": None} "changePwdExpireAt": None, "changePwdToken": None}
user = update(tenant_id=-1, user_id=user_id, changes=changes) user = update(tenant_id=-1, user_id=user_id, changes=changes)
@ -562,28 +544,26 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
{"userId": user_id}) {"userId": user_id})
) )
r = cur.fetchone() r = cur.fetchone()
return r is not None \ return r is not None \
and r.get("jwt_iat") is not None \ and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \ or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \ and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
) )
def authenticate(email, password, for_change_password=False, for_plugin=False): def authenticate(email, password, for_change_password=False, for_plugin=False):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
query = cur.mogrify( query = cur.mogrify(
f"""SELECT f"""SELECT
users.user_id AS id, users.user_id,
1 AS tenant_id, 1 AS tenant_id,
users.role, users.role,
users.name, users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
users.appearance
FROM public.users INNER JOIN public.basic_authentication USING(user_id) FROM public.users INNER JOIN public.basic_authentication USING(user_id)
WHERE users.email = %(email)s WHERE users.email = %(email)s
AND basic_authentication.password = crypt(%(password)s, basic_authentication.password) AND basic_authentication.password = crypt(%(password)s, basic_authentication.password)
@ -597,16 +577,16 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
if r is not None: if r is not None:
if for_change_password: if for_change_password:
return True return True
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"]) r = helper.dict_to_camel_case(r)
query = cur.mogrify( query = cur.mogrify(
f"""UPDATE public.users f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now()) SET jwt_iat = timezone('utc'::text, now())
WHERE user_id = %(user_id)s WHERE user_id = %(user_id)s
RETURNING jwt_iat;""", RETURNING jwt_iat;""",
{"user_id": r["id"]}) {"user_id": r["userId"]})
cur.execute(query) cur.execute(query)
return { return {
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'], "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]), TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"), aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
"email": email, "email": email,

View file

@ -29,8 +29,12 @@ def edit_config(user_id, weekly_report):
def cron(): def cron():
if not helper.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
return
with pg_client.PostgresClient(long_query=True) as cur: with pg_client.PostgresClient(long_query=True) as cur:
params = {"3_days_ago": TimeUTC.midnight(delta_days=-3), params = {"tomorrow": TimeUTC.midnight(delta_days=1),
"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7), "1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14), "2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)} "5_week_ago": TimeUTC.midnight(delta_days=-35)}
@ -43,18 +47,18 @@ def cron():
COALESCE(week_0_issues.count, 0) AS this_week_issues_count, COALESCE(week_0_issues.count, 0) AS this_week_issues_count,
COALESCE(week_1_issues.count, 0) AS past_week_issues_count, COALESCE(week_1_issues.count, 0) AS past_week_issues_count,
COALESCE(month_1_issues.count, 0) AS past_month_issues_count COALESCE(month_1_issues.count, 0) AS past_month_issues_count
FROM public.projects FROM (SELECT project_id, name FROM public.projects WHERE projects.deleted_at ISNULL) AS projects
INNER JOIN LATERAL ( INNER JOIN LATERAL (
SELECT sessions.project_id SELECT sessions.project_id
FROM public.sessions FROM public.sessions
WHERE sessions.project_id = projects.project_id WHERE sessions.project_id = projects.project_id
AND start_ts >= %(3_days_ago)s AND start_ts >= %(3_days_ago)s
AND start_ts < %(tomorrow)s
LIMIT 1) AS recently_active USING (project_id) LIMIT 1) AS recently_active USING (project_id)
INNER JOIN LATERAL ( INNER JOIN LATERAL (
SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails
FROM public.users FROM public.users
WHERE users.tenant_id = projects.tenant_id WHERE users.deleted_at ISNULL
AND users.deleted_at ISNULL
AND users.weekly_report AND users.weekly_report
) AS users ON (TRUE) ) AS users ON (TRUE)
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
@ -62,25 +66,25 @@ def cron():
FROM events_common.issues FROM events_common.issues
INNER JOIN public.sessions USING (session_id) INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id WHERE sessions.project_id = projects.project_id
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp >= %(1_week_ago)s
AND issues.timestamp < %(tomorrow)s
) AS week_0_issues ON (TRUE) ) AS week_0_issues ON (TRUE)
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
SELECT COUNT(1) AS count SELECT COUNT(1) AS count
FROM events_common.issues FROM events_common.issues
INNER JOIN public.sessions USING (session_id) INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT AND issues.timestamp >= %(2_week_ago)s
) AS week_1_issues ON (TRUE) ) AS week_1_issues ON (TRUE)
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
SELECT COUNT(1) AS count SELECT COUNT(1) AS count
FROM events_common.issues FROM events_common.issues
INNER JOIN public.sessions USING (session_id) INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT AND issues.timestamp <= %(1_week_ago)s
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT AND issues.timestamp >= %(5_week_ago)s
) AS month_1_issues ON (TRUE) ) AS month_1_issues ON (TRUE);"""), params)
WHERE projects.deleted_at ISNULL;"""), params)
projects_data = cur.fetchall() projects_data = cur.fetchall()
emails_to_send = [] emails_to_send = []
for p in projects_data: for p in projects_data:

View file

@@ -1,12 +1,13 @@
+import math
 import random
 import re
 import string
 from typing import Union

-import math
 import requests

 import schemas
+from chalicelib.utils.TimeUTC import TimeUTC

 local_prefix = 'local-'
 from decouple import config
@@ -364,10 +365,6 @@ def has_smtp():
     return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0


-def get_edition():
-    return "ee" if "ee" in config("ENTERPRISE_BUILD", default="").lower() else "foss"
-
-
 def old_search_payload_to_flat(values):
     # in case the old search body was passed
     if values.get("events") is not None:
@@ -384,3 +381,20 @@ def custom_alert_to_front(values):
     if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom:
         values["query"]["left"] = values["seriesId"]
     return values
+
+
+def __time_value(row):
+    row["unit"] = schemas.TemplatePredefinedUnits.millisecond
+    factor = 1
+    if row["value"] > TimeUTC.MS_MINUTE:
+        row["value"] = row["value"] / TimeUTC.MS_MINUTE
+        row["unit"] = schemas.TemplatePredefinedUnits.minute
+        factor = TimeUTC.MS_MINUTE
+    elif row["value"] > 1 * 1000:
+        row["value"] = row["value"] / 1000
+        row["unit"] = schemas.TemplatePredefinedUnits.second
+        factor = 1000
+
+    if "chart" in row and factor > 1:
+        for r in row["chart"]:
+            r["value"] /= factor
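A small standalone illustration of what __time_value does to a metric row (numbers invented; TimeUTC.MS_MINUTE is assumed to be 60 000, one minute in milliseconds):

```python
# Standalone sketch of the rescaling; the real code uses TimeUTC.MS_MINUTE and the
# TemplatePredefinedUnits enum instead of the literals below.
MS_MINUTE = 60_000

row = {"value": 150_000.0, "chart": [{"timestamp": 1, "value": 30_000.0}]}

if row["value"] > MS_MINUTE:
    factor, row["unit"] = MS_MINUTE, "min"
elif row["value"] > 1000:
    factor, row["unit"] = 1000, "s"
else:
    factor, row["unit"] = 1, "ms"

row["value"] /= factor
for point in row["chart"]:          # chart points are divided by the same factor
    point["value"] /= factor

print(row)
# {'value': 2.5, 'chart': [{'timestamp': 1, 'value': 0.5}], 'unit': 'min'}
```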

View file

@@ -52,7 +52,9 @@ def make_pool():
     except (Exception, psycopg2.DatabaseError) as error:
         print("Error while closing all connexions to PostgreSQL", error)
     try:
-        postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), 100, **PG_CONFIG)
+        postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20),
+                                                   config("pg_maxconn", cast=int, default=80),
+                                                   **PG_CONFIG)
         if (postgreSQL_pool):
             print("Connection pool created successfully")
     except (Exception, psycopg2.DatabaseError) as error:
@@ -74,12 +76,17 @@ class PostgresClient:
     cursor = None
     long_query = False

-    def __init__(self, long_query=False):
+    def __init__(self, long_query=False, unlimited_query=False):
         self.long_query = long_query
-        if long_query:
+        if unlimited_query:
+            long_config = dict(_PG_CONFIG)
+            long_config["application_name"] += "-UNLIMITED"
+            self.connection = psycopg2.connect(**long_config)
+        elif long_query:
             long_config = dict(_PG_CONFIG)
             long_config["application_name"] += "-LONG"
-            self.connection = psycopg2.connect(**_PG_CONFIG)
+            long_config["options"] = f"-c statement_timeout={config('pg_long_timeout', cast=int, default=5*60) * 1000}"
+            self.connection = psycopg2.connect(**long_config)
         else:
             self.connection = postgreSQL_pool.getconn()

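A rough usage sketch of the three connection modes this leaves callers with (the class is used as a context manager elsewhere in this changeset; the queries below are placeholders):

```python
from chalicelib.utils import pg_client

# Default: pooled connection, bounded by pg_minconn/pg_maxconn.
with pg_client.PostgresClient() as cur:
    cur.execute("SELECT 1;")

# long_query: dedicated "-LONG" connection with the pg_long_timeout
# statement_timeout (default 5 minutes) passed through libpq options.
with pg_client.PostgresClient(long_query=True) as cur:
    cur.execute("SELECT COUNT(*) FROM public.sessions;")

# unlimited_query: dedicated "-UNLIMITED" connection with no added timeout,
# as used by the bulk session deletes above.
with pg_client.PostgresClient(unlimited_query=True) as cur:
    cur.execute("SELECT COUNT(session_id) FROM public.sessions;")
```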

View file

@@ -5,11 +5,14 @@ import boto3
 import botocore
 from botocore.client import Config

-client = boto3.client('s3', endpoint_url=config("S3_HOST"),
-                      aws_access_key_id=config("S3_KEY"),
-                      aws_secret_access_key=config("S3_SECRET"),
-                      config=Config(signature_version='s3v4'),
-                      region_name=config("sessions_region"))
+if not config("S3_HOST", default=False):
+    client = boto3.client('s3')
+else:
+    client = boto3.client('s3', endpoint_url=config("S3_HOST"),
+                          aws_access_key_id=config("S3_KEY"),
+                          aws_secret_access_key=config("S3_SECRET"),
+                          config=Config(signature_version='s3v4'),
+                          region_name=config("sessions_region"))


 def exists(bucket, key):

30
api/development.md Normal file
View file

@ -0,0 +1,30 @@
### Prerequisites
- [Vagrant](../scripts/vagrant/README.md)
- Python 3.9
- Pipenv
### Development environment
```bash
cd openreplay/api
# Make your own copy of .env file and edit it as you want
cp .env.dev .env
# Create a .venv folder to contain all your dependencies
mkdir .venv
# Installing dependencies (pipenv will detect the .venv folder and use it as a target)
pipenv install -r requirements.txt [--skip-lock]
```
### Building and deploying locally
```bash
cd openreplay-contributions
vagrant ssh
cd openreplay-dev/openreplay/scripts/helmcharts
# For complete list of options
# bash local_deploy.sh help
bash local_deploy.sh api
```

View file

@@ -36,7 +36,8 @@ pg_password=asayerPostgres
 pg_port=5432
 pg_user=postgres
 pg_timeout=30
-pg_minconn=45
+pg_minconn=20
+pg_maxconn=50
 PG_RETRY_MAX=50
 PG_RETRY_INTERVAL=2
 put_S3_TTL=20
@@ -44,6 +45,6 @@ sentryURL=
 sessions_bucket=mobs
 sessions_region=us-east-1
 sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://127.0.0.1:9000/
+sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
 stage=default-foss
 version_number=1.4.0

View file

@@ -33,7 +33,9 @@ class ORRoute(APIRoute):
             if isinstance(response, JSONResponse):
                 response: JSONResponse = response
                 body = json.loads(response.body.decode('utf8'))
-                if response.status_code == 200 and body is not None and body.get("errors") is not None:
+                if response.status_code == 200 \
+                        and body is not None and isinstance(body, dict) \
+                        and body.get("errors") is not None:
                     if "not found" in body["errors"][0]:
                         response.status_code = status.HTTP_404_NOT_FOUND
                     else:
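In effect the route class maps 200 responses that carry an "errors" list onto proper HTTP error codes, and the new isinstance check keeps list bodies from crashing it. A sketch of the behaviour (the non-404 branch lies outside this hunk, so it is only hinted at):

```python
# remap() is a hypothetical stand-in for the logic inside ORRoute above.
def remap(status_code: int, body) -> int:
    if status_code == 200 and body is not None and isinstance(body, dict) \
            and body.get("errors") is not None:
        if "not found" in body["errors"][0]:
            return 404
        # the real else branch (not shown in this hunk) picks another 4xx status
    return status_code

print(remap(200, {"errors": ["session not found"]}))  # 404
print(remap(200, ["plain", "list"]))                  # 200, no longer raises
```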

View file

@@ -1,15 +1,15 @@
-requests==2.26.0
-urllib3==1.26.6
-boto3==1.16.1
-pyjwt==1.7.1
-psycopg2-binary==2.8.6
-elasticsearch==7.9.1
-jira==3.1.1
-fastapi==0.75.0
-uvicorn[standard]==0.17.5
+requests==2.28.0
+urllib3==1.26.9
+boto3==1.24.11
+pyjwt==2.4.0
+psycopg2-binary==2.9.3
+elasticsearch==8.2.3
+jira==3.2.0
+fastapi==0.78.0
+uvicorn[standard]==0.17.6
 python-decouple==3.6
-pydantic[email]==1.8.2
-apscheduler==3.8.1
+pydantic[email]==1.9.1
+apscheduler==3.9.1

View file

@@ -1,7 +1,8 @@
-from typing import Union
+from typing import Union, Optional
 from decouple import config
-from fastapi import Depends, Body, BackgroundTasks
+from fastapi import Depends, Body, BackgroundTasks, HTTPException
+from starlette import status

 import schemas

 from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
@@ -13,7 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
     assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, users, \
     custom_metrics, saved_search
 from chalicelib.core.collaboration_slack import Slack
-from chalicelib.utils import email_helper
+from chalicelib.utils import email_helper, helper, captcha
 from chalicelib.utils.TimeUTC import TimeUTC
 from or_dependencies import OR_context
 from routers.base import get_routers
@ -21,6 +22,34 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers() public_app, app, app_apikey = get_routers()
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
if "errors" in r:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
r["smtp"] = helper.has_smtp()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r
}
}
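For reference, a hypothetical client call against the new /login route; the host, port and credentials are placeholders, and the captcha branch is assumed to be disabled so only email and password are sent:

```python
import requests

resp = requests.post(
    "http://localhost:8000/login",
    json={"email": "admin@example.com", "password": "secret"},
)
if resp.status_code == 200:
    payload = resp.json()
    jwt = payload["jwt"]            # token returned at the top level
    user = payload["data"]["user"]  # user record, including the smtp flag
else:
    # HTTPException responses carry the message in the "detail" field
    print(resp.status_code, resp.json().get("detail"))
```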
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
@ -107,10 +136,12 @@ def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType, type: Union[schemas.FilterType, schemas.EventType,
schemas.PerformanceEventType, schemas.FetchFilterType, schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType] = None, schemas.GraphqlFilterType] = None,
key: str = None, key: str = None, source: str = None, live: bool = False,
source: str = None, context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0: if len(q) == 0:
return {"data": []} return {"data": []}
if live:
return assist.autocomplete(project_id=projectId, q=q, key=key)
if type in [schemas.FetchFilterType._url]: if type in [schemas.FetchFilterType._url]:
type = schemas.EventType.request type = schemas.EventType.request
elif type in [schemas.GraphqlFilterType._name]: elif type in [schemas.GraphqlFilterType._name]:
@ -743,8 +774,8 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) @app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"])
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId) issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None: if issue is None:
return {"errors": ["issue not found"]} return {"errors": ["issue not found"]}
@ -829,8 +860,15 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"]) @app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): def get_sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, user_id=userId) data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, body=data)
return {'data': data} return {'data': data}
@ -903,7 +941,7 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors']) @app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return errors.search(data, projectId, user_id=context.user_id) return {"data": errors.search(data, projectId, user_id=context.user_id)}
@app.get('/{projectId}/errors/stats', tags=['errors']) @app.get('/{projectId}/errors/stats', tags=['errors'])
@ -966,6 +1004,11 @@ def get_notifications(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)} return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/count', tags=['notifications'])
def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/{notificationId}/view', tags=['notifications']) @app.get('/notifications/{notificationId}/view', tags=['notifications'])
def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)} return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)}
@ -1071,17 +1114,10 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context
@app.put('/account', tags=["account"]) @app.put('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...), def edit_account(data: schemas.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data.dict(), return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
editor_id=context.user_id) editor_id=context.user_id)
@app.post('/account/appearance', tags=["account"])
@app.put('/account/appearance', tags=["account"])
def edit_account_appearance(data: schemas.EditUserAppearanceSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_appearance(tenant_id=context.tenant_id, user_id=context.user_id, changes=data.dict())
@app.post('/account/password', tags=["account"]) @app.post('/account/password', tags=["account"])
@app.put('/account/password', tags=["account"]) @app.put('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
@ -1120,6 +1156,16 @@ def delete_saved_search(projectId: int, search_id: int, context: schemas.Current
return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)} return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)}
@app.get('/limits', tags=['accounts'])
def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': {
"teamMember": -1,
"projects": -1,
}
}
@public_app.get('/', tags=["health"]) @public_app.get('/', tags=["health"])
@public_app.post('/', tags=["health"]) @public_app.post('/', tags=["health"])
@public_app.put('/', tags=["health"]) @public_app.put('/', tags=["health"])


@ -1,17 +1,15 @@
from typing import Optional from typing import Optional
from decouple import config from decouple import config
from fastapi import Body, Depends, HTTPException, status, BackgroundTasks from fastapi import Body, Depends, BackgroundTasks
from starlette.responses import RedirectResponse from starlette.responses import RedirectResponse
import schemas import schemas
from chalicelib.core import assist
from chalicelib.core import integrations_manager from chalicelib.core import integrations_manager
from chalicelib.core import sessions from chalicelib.core import sessions
from chalicelib.core import tenants, users, metadata, projects, license from chalicelib.core import tenants, users, metadata, projects, license
from chalicelib.core import webhook from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper from chalicelib.utils import helper
from or_dependencies import OR_context from or_dependencies import OR_context
from routers.base import get_routers from routers.base import get_routers
@ -24,60 +22,23 @@ def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(), return {"data": {"tenants": tenants.tenants_exists(),
"sso": None, "sso": None,
"ssoProvider": None, "ssoProvider": None,
"edition": helper.get_edition()}} "edition": license.EDITION}}
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]
r["iceServers"] = c["iceServers"]
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
@app.get('/account', tags=['accounts']) @app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)): def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
t.pop("createdAt")
t["tenantName"] = t.pop("name")
return { return {
'data': { 'data': {
**r, **r,
"limits": { **t,
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
},
**license.get_status(context.tenant_id), **license.get_status(context.tenant_id),
"smtp": helper.has_smtp(), "smtp": helper.has_smtp(),
"iceServers": assist.get_ice_servers() # "iceServers": assist.get_ice_servers()
} }
} }
@ -181,7 +142,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.post('/client/members/{memberId}', tags=["client"]) @app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema, def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId) user_id_to_update=memberId)
@ -199,28 +160,11 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)} m_key=key, project_id=projectId)}
@app.get('/plans', tags=["plan"])
def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": license.get_status(context.tenant_id)
}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False) @public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats(): def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}} return {"data": {"sessions:": sessions.count_all()}}
@app.get('/client', tags=['projects'])
def get_client(context: schemas.CurrentContext = Depends(OR_context)):
r = tenants.get_by_tenant_id(context.tenant_id)
if r is not None:
r.pop("createdAt")
return {
'data': r
}
@app.get('/projects', tags=['projects']) @app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)): def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,


@ -1,7 +1,7 @@
from fastapi import Body, Depends from fastapi import Body, Depends
import schemas import schemas
from chalicelib.core import dashboards, custom_metrics from chalicelib.core import dashboards, custom_metrics, funnels
from or_dependencies import OR_context from or_dependencies import OR_context
from routers.base import get_routers from routers.base import get_routers
@ -102,18 +102,29 @@ def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_c
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...), def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live(project_id=projectId, data=data)} return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int, def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)): context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data} return {"data": data}
@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.series) == 0:
return {"data": []}
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter)
return {"data": data}
@app.post('/{projectId}/metrics', tags=["dashboard"]) @app.post('/{projectId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/metrics', tags=["dashboard"]) @app.put('/{projectId}/metrics', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) @app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@ -149,6 +160,42 @@ def get_custom_metric_sessions(projectId: int, metric_id: int,
return {"data": data} return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, issue_id=issueId, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
def get_custom_metric_errors_list(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...), def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),


@@ -12,7 +12,8 @@ public_app, app, app_apikey = get_routers()
 @app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"])
 def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None):
     projectId = projects.get_internal_project_id(projectKey)
+    if projectId is None:
+        return {"errors": ["invalid projectKey"]}
     return {
         'data': sessions.get_user_sessions(
             project_id=projectId,
@@ -26,6 +27,8 @@ def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_
 @app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"])
 def get_session_events(projectKey: str, sessionId: int):
     projectId = projects.get_internal_project_id(projectKey)
+    if projectId is None:
+        return {"errors": ["invalid projectKey"]}
     return {
         'data': events.get_by_sessionId2_pg(
             project_id=projectId,
@@ -37,6 +40,8 @@ def get_session_events(projectKey: str, sessionId: int):
 @app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"])
 def get_user_details(projectKey: str, userId: str):
     projectId = projects.get_internal_project_id(projectKey)
+    if projectId is None:
+        return {"errors": ["invalid projectKey"]}
     return {
         'data': sessions.get_session_user(
             project_id=projectId,
@@ -48,6 +53,8 @@ def get_user_details(projectKey: str, userId: str):
 @app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"])
 def schedule_to_delete_user_data(projectKey: str, userId: str):
     projectId = projects.get_internal_project_id(projectKey)
+    if projectId is None:
+        return {"errors": ["invalid projectKey"]}
     data = {"action": "delete_user_data",
             "reference_id": userId,
             "description": f"Delete user sessions of userId = {userId}",
@@ -61,6 +68,8 @@ def schedule_to_delete_user_data(projectKey: str, userId: str):
 @app_apikey.get('/v1/{projectKey}/jobs', tags=["api"])
 def get_jobs(projectKey: str):
     projectId = projects.get_internal_project_id(projectKey)
+    if projectId is None:
+        return {"errors": ["invalid projectKey"]}
     return {
         'data': jobs.get_all(project_id=projectId)
     }


@@ -12,7 +12,7 @@ def attribute_to_camel_case(snake_str):
 def transform_email(email: str) -> str:
-    return email.lower() if isinstance(email, str) else email
+    return email.lower().strip() if isinstance(email, str) else email
class _Grecaptcha(BaseModel): class _Grecaptcha(BaseModel):
@ -37,16 +37,11 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel): class EditUserSchema(BaseModel):
name: Optional[str] = Field(None) name: Optional[str] = Field(None)
email: Optional[EmailStr] = Field(None) email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False) admin: Optional[bool] = Field(None)
appearance: Optional[dict] = Field({})
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha): class ForgetPasswordPayloadSchema(_Grecaptcha):
email: EmailStr = Field(...) email: EmailStr = Field(...)
@ -132,13 +127,11 @@ class CreateMemberSchema(BaseModel):
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditMemberSchema(BaseModel): class EditMemberSchema(EditUserSchema):
name: str = Field(...) name: str = Field(...)
email: EmailStr = Field(...) email: EmailStr = Field(...)
admin: bool = Field(False) admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel): class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...) invitation: str = Field(...)
@ -486,6 +479,10 @@ class IssueType(str, Enum):
js_exception = 'js_exception' js_exception = 'js_exception'
class MetricFormatType(str, Enum):
session_count = 'sessionCount'
class __MixedSearchFilter(BaseModel): class __MixedSearchFilter(BaseModel):
is_event: bool = Field(...) is_event: bool = Field(...)
@ -618,17 +615,28 @@ class _PaginatedSchema(BaseModel):
page: int = Field(default=1, gt=0) page: int = Field(default=1, gt=0)
class SortOrderType(str, Enum):
asc = "ASC"
desc = "DESC"
class SessionsSearchPayloadSchema(_PaginatedSchema): class SessionsSearchPayloadSchema(_PaginatedSchema):
events: List[_SessionSearchEventSchema] = Field([]) events: List[_SessionSearchEventSchema] = Field([])
filters: List[SessionSearchFilterSchema] = Field([]) filters: List[SessionSearchFilterSchema] = Field([])
startDate: int = Field(None) startDate: int = Field(None)
endDate: int = Field(None) endDate: int = Field(None)
sort: str = Field(default="startTs") sort: str = Field(default="startTs")
order: Literal["asc", "desc"] = Field(default="desc") order: SortOrderType = Field(default=SortOrderType.desc)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False) group_by_user: bool = Field(default=False)
bookmarked: bool = Field(default=False) bookmarked: bool = Field(default=False)
@root_validator(pre=True)
def transform_order(cls, values):
if values.get("order") is not None:
values["order"] = values["order"].upper()
return values
class Config: class Config:
alias_generator = attribute_to_camel_case alias_generator = attribute_to_camel_case
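The new SortOrderType enum only accepts the uppercase values "ASC"/"DESC", so the pre root_validator above uppercases whatever the UI sends before enum validation runs. A minimal, self-contained sketch of the same pattern (class and field names here are illustrative, not the project's):

```python
from enum import Enum
from pydantic import BaseModel, root_validator


class SortOrderType(str, Enum):
    asc = "ASC"
    desc = "DESC"


class ExamplePayload(BaseModel):
    order: SortOrderType = SortOrderType.desc

    @root_validator(pre=True)
    def normalize_order(cls, values):
        # Accept "asc"/"desc" in any case from the UI and normalize before enum validation
        if values.get("order") is not None:
            values["order"] = values["order"].upper()
        return values


print(ExamplePayload(order="asc").order)  # SortOrderType.asc
```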
@ -757,8 +765,7 @@ class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...) keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema): class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema, SearchErrorsSchema):
# class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema):
startDate: Optional[int] = Field(None) startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None) endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None) sort: Optional[str] = Field(None)
@ -790,6 +797,8 @@ class MetricTableViewType(str, Enum):
class MetricType(str, Enum): class MetricType(str, Enum):
timeseries = "timeseries" timeseries = "timeseries"
table = "table" table = "table"
predefined = "predefined"
funnel = "funnel"
class TableMetricOfType(str, Enum): class TableMetricOfType(str, Enum):
@ -800,6 +809,8 @@ class TableMetricOfType(str, Enum):
user_id = FilterType.user_id.value user_id = FilterType.user_id.value
issues = FilterType.issue.value issues = FilterType.issue.value
visited_url = EventType.location.value visited_url = EventType.location.value
sessions = "SESSIONS"
errors = IssueType.js_exception.value
class TimeseriesMetricOfType(str, Enum): class TimeseriesMetricOfType(str, Enum):
@ -815,7 +826,7 @@ class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
alias_generator = attribute_to_camel_case alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema): class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema):
density: int = Field(7) density: int = Field(7)
class Config: class Config:
@ -830,7 +841,7 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
metric_type: MetricType = Field(MetricType.timeseries) metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id) metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
metric_value: List[IssueType] = Field([]) metric_value: List[IssueType] = Field([])
metric_format: Optional[str] = Field(None) metric_format: Optional[MetricFormatType] = Field(None)
# metricFraction: float = Field(None, gt=0, lt=1) # metricFraction: float = Field(None, gt=0, lt=1)
# This is used to handle wrong values sent by the UI # This is used to handle wrong values sent by the UI
@ -863,8 +874,23 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
alias_generator = attribute_to_camel_case alias_generator = attribute_to_camel_case
class CustomMetricsConfigSchema(BaseModel):
col: Optional[int] = Field(default=2)
row: Optional[int] = Field(default=2)
position: Optional[int] = Field(default=0)
class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema): class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1) series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema())
@root_validator(pre=True)
def transform_series(cls, values):
if values.get("series") is not None and len(values["series"]) > 1 and values.get(
"metric_type") == MetricType.funnel.value:
values["series"] = [values["series"][0]]
return values
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema): class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
@ -888,6 +914,7 @@ class SavedSearchSchema(FunnelSchema):
class CreateDashboardSchema(BaseModel): class CreateDashboardSchema(BaseModel):
name: str = Field(..., min_length=1) name: str = Field(..., min_length=1)
description: Optional[str] = Field(default='')
is_public: bool = Field(default=False) is_public: bool = Field(default=False)
is_pinned: bool = Field(default=False) is_pinned: bool = Field(default=False)
metrics: Optional[List[int]] = Field(default=[]) metrics: Optional[List[int]] = Field(default=[])
@ -966,6 +993,7 @@ class TemplatePredefinedKeys(str, Enum):
class TemplatePredefinedUnits(str, Enum): class TemplatePredefinedUnits(str, Enum):
millisecond = "ms" millisecond = "ms"
second = "s"
minute = "min" minute = "min"
memory = "mb" memory = "mb"
frame = "f/s" frame = "f/s"
@ -980,3 +1008,62 @@ class CustomMetricAndTemplate(BaseModel):
class Config: class Config:
alias_generator = attribute_to_camel_case alias_generator = attribute_to_camel_case
class LiveFilterType(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
user_device = FilterType.user_device.value
user_country = FilterType.user_country.value
user_id = FilterType.user_id.value
user_anonymous_id = FilterType.user_anonymous_id.value
rev_id = FilterType.rev_id.value
platform = FilterType.platform.value
page_title = "PAGETITLE"
session_id = "SESSIONID"
metadata = "METADATA"
user_UUID = "USERUUID"
tracker_version = "TRACKERVERSION"
user_browser_version = "USERBROWSERVERSION"
user_device_type = "USERDEVICETYPE",
class LiveSessionSearchFilterSchema(BaseModel):
value: Union[List[str], str] = Field(...)
type: LiveFilterType = Field(...)
source: Optional[str] = Field(None)
@root_validator
def validator(cls, values):
if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value:
assert values.get("source") is not None, "source should not be null for METADATA type"
assert len(values.get("source")) > 0, "source should not be empty for METADATA type"
return values
class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
filters: List[LiveSessionSearchFilterSchema] = Field([])
sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP")
order: SortOrderType = Field(default=SortOrderType.desc)
@root_validator(pre=True)
def transform(cls, values):
if values.get("order") is not None:
values["order"] = values["order"].upper()
if values.get("filters") is not None:
i = 0
while i < len(values["filters"]):
if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0:
del values["filters"][i]
else:
i += 1
for i in values["filters"]:
if i.get("type") == LiveFilterType.platform.value:
i["type"] = LiveFilterType.user_device_type.value
if values.get("sort") is not None:
if values["sort"].lower() == "startts":
values["sort"] = "TIMESTAMP"
return values
class Config:
alias_generator = attribute_to_camel_case
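The pre-validator above drops filters with empty values and remaps a few legacy names before validation. The in-place deletion loop is easy to get wrong, so here is the same pattern in isolation (the type strings are illustrative only):

```python
# Stand-alone illustration of dropping empty filters in place before validation.
filters = [
    {"type": "USERID", "value": ["u1"]},
    {"type": "USERBROWSER", "value": []},   # removed: empty value
    {"type": "PLATFORM", "value": None},    # removed: null value
]
i = 0
while i < len(filters):
    if filters[i]["value"] is None or len(filters[i]["value"]) == 0:
        del filters[i]  # deleting shifts the remaining items left, so do not advance i
    else:
        i += 1
print(filters)  # [{'type': 'USERID', 'value': ['u1']}]
```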

backend/.dockerignore (new file, 6 lines added)

@ -0,0 +1,6 @@
# ignore .git and .cache folders
.git
.cache
**/build.sh
**/build_*.sh
**/*deploy.sh


@ -6,17 +6,20 @@ WORKDIR /root
COPY go.mod . COPY go.mod .
COPY go.sum . COPY go.sum .
RUN go mod download RUN go mod tidy && go mod download
FROM prepare AS build FROM prepare AS build
COPY cmd cmd
COPY pkg pkg COPY pkg pkg
COPY services services COPY internal internal
RUN go mod tidy
ARG SERVICE_NAME ARG SERVICE_NAME
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags musl openreplay/backend/services/$SERVICE_NAME RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags musl openreplay/backend/cmd/$SERVICE_NAME
FROM alpine
FROM alpine AS entrypoint
RUN apk add --no-cache ca-certificates RUN apk add --no-cache ca-certificates
ENV TZ=UTC \ ENV TZ=UTC \
@ -25,10 +28,10 @@ ENV TZ=UTC \
MAXMINDDB_FILE=/root/geoip.mmdb \ MAXMINDDB_FILE=/root/geoip.mmdb \
UAPARSER_FILE=/root/regexes.yaml \ UAPARSER_FILE=/root/regexes.yaml \
HTTP_PORT=80 \ HTTP_PORT=80 \
BEACON_SIZE_LIMIT=7000000 \ BEACON_SIZE_LIMIT=3000000 \
KAFKA_USE_SSL=true \ KAFKA_USE_SSL=true \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \ KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=3000 \ REDIS_STREAMS_MAX_LEN=10000 \
TOPIC_RAW_WEB=raw \ TOPIC_RAW_WEB=raw \
TOPIC_RAW_IOS=raw-ios \ TOPIC_RAW_IOS=raw-ios \
TOPIC_CACHE=cache \ TOPIC_CACHE=cache \
@ -39,13 +42,19 @@ ENV TZ=UTC \
GROUP_DB=db \ GROUP_DB=db \
GROUP_ENDER=ender \ GROUP_ENDER=ender \
GROUP_CACHE=cache \ GROUP_CACHE=cache \
GROUP_HEURISTICS=heuristics \
AWS_REGION_WEB=eu-central-1 \ AWS_REGION_WEB=eu-central-1 \
AWS_REGION_IOS=eu-west-1 \ AWS_REGION_IOS=eu-west-1 \
AWS_REGION_ASSETS=eu-central-1 \ AWS_REGION_ASSETS=eu-central-1 \
CACHE_ASSETS=true \ CACHE_ASSETS=true \
ASSETS_SIZE_LIMIT=6291456 \ ASSETS_SIZE_LIMIT=6291456 \
FS_CLEAN_HRS=72 \ FS_CLEAN_HRS=72 \
LOG_QUEUE_STATS_INTERVAL_SEC=60 FILE_SPLIT_SIZE=300000 \
LOG_QUEUE_STATS_INTERVAL_SEC=60 \
DB_BATCH_QUEUE_LIMIT=20 \
DB_BATCH_SIZE_LIMIT=10000000 \
PARTITIONS_NUMBER=16 \
QUEUE_MESSAGE_SIZE_LIMIT=1048576
ARG SERVICE_NAME ARG SERVICE_NAME


@ -1,4 +1,4 @@
FROM golang:1.13-alpine3.10 AS prepare FROM golang:1.18-alpine3.15 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
@ -10,13 +10,13 @@ RUN go mod download
FROM prepare AS build FROM prepare AS build
COPY cmd cmd
COPY pkg pkg COPY pkg pkg
COPY services services COPY internal internal
RUN for name in alerts assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/services/$name; done RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/cmd/$name; done
FROM alpine AS entrypoint
FROM alpine
#FROM pygmy/alpine-tini:latest #FROM pygmy/alpine-tini:latest
RUN apk add --no-cache ca-certificates RUN apk add --no-cache ca-certificates
@ -26,8 +26,9 @@ ENV TZ=UTC \
MAXMINDDB_FILE=/root/geoip.mmdb \ MAXMINDDB_FILE=/root/geoip.mmdb \
UAPARSER_FILE=/root/regexes.yaml \ UAPARSER_FILE=/root/regexes.yaml \
HTTP_PORT=80 \ HTTP_PORT=80 \
BEACON_SIZE_LIMIT=1000000 \ BEACON_SIZE_LIMIT=7000000 \
KAFKA_USE_SSL=true \ KAFKA_USE_SSL=true \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=3000 \ REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW_WEB=raw \ TOPIC_RAW_WEB=raw \
TOPIC_RAW_IOS=raw-ios \ TOPIC_RAW_IOS=raw-ios \
@ -42,10 +43,11 @@ ENV TZ=UTC \
AWS_REGION_WEB=eu-central-1 \ AWS_REGION_WEB=eu-central-1 \
AWS_REGION_IOS=eu-west-1 \ AWS_REGION_IOS=eu-west-1 \
AWS_REGION_ASSETS=eu-central-1 \ AWS_REGION_ASSETS=eu-central-1 \
CACHE_ASSETS=false \ CACHE_ASSETS=true \
ASSETS_SIZE_LIMIT=6291456 \ ASSETS_SIZE_LIMIT=6291456 \
FS_CLEAN_HRS=12 FS_CLEAN_HRS=12 \
FILE_SPLIT_SIZE=300000 \
LOG_QUEUE_STATS_INTERVAL_SEC=60
RUN mkdir $FS_DIR RUN mkdir $FS_DIR
#VOLUME [ $FS_DIR ] # Uncomment in case of using Bind mount. #VOLUME [ $FS_DIR ] # Uncomment in case of using Bind mount.

backend/build.sh (file mode changed from normal to executable, 28 changed lines)

@ -13,9 +13,19 @@ ee="false"
check_prereq() { check_prereq() {
which docker || { which docker || {
echo "Docker not installed, please install docker." echo "Docker not installed, please install docker."
exit=1 exit 1
} }
[[ exit -eq 1 ]] && exit 1 return
}
function build_service() {
image="$1"
echo "BUILDING $image"
docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image .
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1}
}
return
} }
function build_api(){ function build_api(){
@ -25,21 +35,15 @@ function build_api(){
ee="true" ee="true"
} }
[[ $2 != "" ]] && { [[ $2 != "" ]] && {
image="$2" build_service $2
docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --build-arg SERVICE_NAME=$image .
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1}
}
return return
} }
for image in $(ls services); for image in $(ls cmd);
do do
docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --build-arg SERVICE_NAME=$image . build_service $image
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1}
}
echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}" echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}"
done done
echo "backend build completed"
} }
check_prereq check_prereq


@ -1,45 +1,54 @@
package main package main
import ( import (
"context"
"log" "log"
"time" "openreplay/backend/pkg/monitoring"
"os" "os"
"os/signal" "os/signal"
"syscall" "syscall"
"time"
"openreplay/backend/pkg/env" "openreplay/backend/internal/assets"
"openreplay/backend/internal/assets/cacher"
config "openreplay/backend/internal/config/assets"
"openreplay/backend/pkg/messages" "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types" "openreplay/backend/pkg/queue/types"
"openreplay/backend/services/assets/cacher"
) )
func main() { func main() {
metrics := monitoring.New("assets")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
GROUP_CACHE := env.String("GROUP_CACHE") cfg := config.New()
TOPIC_CACHE := env.String("TOPIC_CACHE")
cacher := cacher.NewCacher( cacher := cacher.NewCacher(
env.String("AWS_REGION"), cfg.AWSRegion,
env.String("S3_BUCKET_ASSETS"), cfg.S3BucketAssets,
env.String("ASSETS_ORIGIN"), cfg.AssetsOrigin,
env.Int("ASSETS_SIZE_LIMIT"), cfg.AssetsSizeLimit,
) )
totalAssets, err := metrics.RegisterCounter("assets_total")
if err != nil {
log.Printf("can't create assets_total metric: %s", err)
}
consumer := queue.NewMessageConsumer( consumer := queue.NewMessageConsumer(
GROUP_CACHE, cfg.GroupCache,
[]string{TOPIC_CACHE}, []string{cfg.TopicCache},
func(sessionID uint64, message messages.Message, e *types.Meta) { func(sessionID uint64, message messages.Message, e *types.Meta) {
switch msg := message.(type) { switch msg := message.(type) {
case *messages.AssetCache: case *messages.AssetCache:
cacher.CacheURL(sessionID, msg.URL) cacher.CacheURL(sessionID, msg.URL)
totalAssets.Add(context.Background(), 1)
case *messages.ErrorEvent: case *messages.ErrorEvent:
if msg.Source != "js_exception" { if msg.Source != "js_exception" {
return return
} }
sourceList, err := extractJSExceptionSources(&msg.Payload) sourceList, err := assets.ExtractJSExceptionSources(&msg.Payload)
if err != nil { if err != nil {
log.Printf("Error on source extraction: %v", err) log.Printf("Error on source extraction: %v", err)
return return
@ -50,14 +59,15 @@ func main() {
} }
}, },
true, true,
cfg.MessageSizeLimit,
) )
tick := time.Tick(20 * time.Minute) log.Printf("Cacher service started\n")
sigchan := make(chan os.Signal, 1) sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
log.Printf("Cacher service started\n") tick := time.Tick(20 * time.Minute)
for { for {
select { select {
case sig := <-sigchan: case sig := <-sigchan:
@ -66,6 +76,7 @@ func main() {
os.Exit(0) os.Exit(0)
case err := <-cacher.Errors: case err := <-cacher.Errors:
log.Printf("Error while caching: %v", err) log.Printf("Error while caching: %v", err)
// TODO: notify user
case <-tick: case <-tick:
cacher.UpdateTimeouts() cacher.UpdateTimeouts()
default: default:

backend/cmd/db/main.go (new file, 141 lines added)

@ -0,0 +1,141 @@
package main
import (
"errors"
"log"
"openreplay/backend/internal/config/db"
"openreplay/backend/internal/db/datasaver"
"openreplay/backend/pkg/handlers"
custom2 "openreplay/backend/pkg/handlers/custom"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/sessions"
"time"
"os"
"os/signal"
"syscall"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
)
func main() {
metrics := monitoring.New("db")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := db.New()
// Init database
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres, cfg.BatchQueueLimit, cfg.BatchSizeLimit, metrics), cfg.ProjectExpirationTimeoutMs)
defer pg.Close()
// HandlersFabric returns the list of message handlers we want to be applied to each incoming message.
handlersFabric := func() []handlers.MessageProcessor {
return []handlers.MessageProcessor{
&custom2.EventMapper{},
custom2.NewInputEventBuilder(),
custom2.NewPageEventBuilder(),
}
}
// Create handler's aggregator
builderMap := sessions.NewBuilderMap(handlersFabric)
// Init modules
saver := datasaver.New(pg)
saver.InitStats()
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
// Handler logic
handler := func(sessionID uint64, msg messages.Message, meta *types.Meta) {
statsLogger.Collect(sessionID, meta)
// Just save session data into db without additional checks
if err := saver.InsertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, sessionID, msg)
}
return
}
session, err := pg.GetSession(sessionID)
if session == nil {
if err != nil && !errors.Is(err, cache.NilSessionInCacheError) {
log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg)
}
return
}
// Save statistics to db
err = saver.InsertStats(session, msg)
if err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
}
// Handle heuristics and save to temporary queue in memory
builderMap.HandleMessage(sessionID, msg, msg.Meta().Index)
// Process saved heuristics messages as usual messages above in the code
builderMap.IterateSessionReadyMessages(sessionID, func(msg messages.Message) {
// TODO: DRY code (carefully with the return statement logic)
if err := saver.InsertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
return
}
if err := saver.InsertStats(session, msg); err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
})
}
// Init consumer
consumer := queue.NewMessageConsumer(
cfg.GroupDB,
[]string{
cfg.TopicRawWeb,
cfg.TopicAnalytics,
},
handler,
false,
cfg.MessageSizeLimit,
)
log.Printf("Db service started\n")
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
commitTick := time.Tick(cfg.CommitBatchTimeout)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
consumer.Close()
os.Exit(0)
case <-commitTick:
// Send collected batches to db
pg.CommitBatches()
if err := saver.CommitStats(); err != nil {
log.Printf("Error on stats commit: %v", err)
}
// TODO?: separate stats & regular messages
if err := consumer.Commit(); err != nil {
log.Printf("Error on consumer commit: %v", err)
}
default:
// Handle new message from queue
err := consumer.ConsumeNext()
if err != nil {
log.Fatalf("Error on consumption: %v", err) // TODO: is always fatal?
}
}
}
}

backend/cmd/ender/main.go (new file, 105 lines added)

@ -0,0 +1,105 @@
package main
import (
"log"
"openreplay/backend/internal/config/ender"
"openreplay/backend/internal/sessionender"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/monitoring"
"time"
"os"
"os/signal"
"syscall"
"openreplay/backend/pkg/intervals"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
)
//
func main() {
metrics := monitoring.New("ender")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
// Load service configuration
cfg := ender.New()
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), cfg.ProjectExpirationTimeoutMs)
defer pg.Close()
// Init all modules
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber)
if err != nil {
log.Printf("can't init ender service: %s", err)
return
}
producer := queue.NewProducer(cfg.MessageSizeLimit)
consumer := queue.NewMessageConsumer(
cfg.GroupEnder,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, msg messages.Message, meta *types.Meta) {
switch msg.(type) {
case *messages.SessionStart, *messages.SessionEnd:
// Skip several message types
return
}
// Test debug
if msg.Meta().Timestamp == 0 {
log.Printf("ZERO TS, sessID: %d, msgType: %d", sessionID, msg.TypeID())
}
statsLogger.Collect(sessionID, meta)
sessions.UpdateSession(sessionID, meta.Timestamp, msg.Meta().Timestamp)
},
false,
cfg.MessageSizeLimit,
)
log.Printf("Ender service started\n")
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
tick := time.Tick(intervals.EVENTS_COMMIT_INTERVAL * time.Millisecond)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
producer.Close(cfg.ProducerTimeout)
if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil {
log.Printf("can't commit messages with offset: %s", err)
}
consumer.Close()
os.Exit(0)
case <-tick:
// Find ended sessions and send notification to other services
sessions.HandleEndedSessions(func(sessionID uint64, timestamp int64) bool {
msg := &messages.SessionEnd{Timestamp: uint64(timestamp)}
if err := pg.InsertSessionEnd(sessionID, msg.Timestamp); err != nil {
log.Printf("can't save sessionEnd to database, sessID: %d", sessionID)
return false
}
if err := producer.Produce(cfg.TopicRawWeb, sessionID, messages.Encode(msg)); err != nil {
log.Printf("can't send sessionEnd to topic: %s; sessID: %d", err, sessionID)
return false
}
return true
})
producer.Flush(cfg.ProducerTimeout)
if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil {
log.Printf("can't commit messages with offset: %s", err)
}
default:
if err := consumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consuming: %v", err)
}
}
}
}


@ -0,0 +1,92 @@
package main
import (
"log"
"openreplay/backend/internal/config/heuristics"
"openreplay/backend/pkg/handlers"
web2 "openreplay/backend/pkg/handlers/web"
"openreplay/backend/pkg/intervals"
logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/sessions"
"os"
"os/signal"
"syscall"
"time"
)
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
// Load service configuration
cfg := heuristics.New()
// HandlersFabric returns the list of message handlers we want to be applied to each incoming message.
handlersFabric := func() []handlers.MessageProcessor {
return []handlers.MessageProcessor{
// web handlers
&web2.ClickRageDetector{},
&web2.CpuIssueDetector{},
&web2.DeadClickDetector{},
&web2.MemoryIssueDetector{},
&web2.NetworkIssueDetector{},
&web2.PerformanceAggregator{},
// iOS's handlers
//&ios2.AppNotResponding{},
//&ios2.ClickRageDetector{},
//&ios2.PerformanceAggregator{},
// Other handlers (you can add your custom handlers here)
//&custom.CustomHandler{},
}
}
// Create handler's aggregator
builderMap := sessions.NewBuilderMap(handlersFabric)
// Init logger
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
// Init producer and consumer for data bus
producer := queue.NewProducer(cfg.MessageSizeLimit)
consumer := queue.NewMessageConsumer(
cfg.GroupHeuristics,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, msg messages.Message, meta *types.Meta) {
statsLogger.Collect(sessionID, meta)
builderMap.HandleMessage(sessionID, msg, msg.Meta().Index)
},
false,
cfg.MessageSizeLimit,
)
log.Printf("Heuristics service started\n")
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
tick := time.Tick(intervals.EVENTS_COMMIT_INTERVAL * time.Millisecond)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
producer.Close(cfg.ProducerTimeout)
consumer.Commit()
consumer.Close()
os.Exit(0)
case <-tick:
builderMap.IterateReadyMessages(func(sessionID uint64, readyMsg messages.Message) {
producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(readyMsg))
})
producer.Flush(cfg.ProducerTimeout)
consumer.Commit()
default:
if err := consumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consuming: %v", err)
}
}
}
}

backend/cmd/http/main.go (new file, 66 lines added)

@ -0,0 +1,66 @@
package main
import (
"log"
"openreplay/backend/internal/config/http"
"openreplay/backend/internal/http/router"
"openreplay/backend/internal/http/server"
"openreplay/backend/internal/http/services"
"openreplay/backend/pkg/monitoring"
"os"
"os/signal"
"syscall"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/pprof"
"openreplay/backend/pkg/queue"
)
func main() {
metrics := monitoring.New("http")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
pprof.StartProfilingServer()
cfg := http.New()
// Connect to queue
producer := queue.NewProducer(cfg.MessageSizeLimit)
defer producer.Close(15000)
// Connect to database
dbConn := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), 1000*60*20)
defer dbConn.Close()
// Build all services
services := services.New(cfg, producer, dbConn)
// Init server's routes
router, err := router.NewRouter(cfg, services, metrics)
if err != nil {
log.Fatalf("failed while creating engine: %s", err)
}
// Init server
server, err := server.New(router.GetHandler(), cfg.HTTPHost, cfg.HTTPPort, cfg.HTTPTimeout)
if err != nil {
log.Fatalf("failed while creating server: %s", err)
}
// Run server
go func() {
if err := server.Start(); err != nil {
log.Fatalf("Server error: %v\n", err)
}
}()
log.Printf("Server successfully started on port %v\n", cfg.HTTPPort)
// Wait stop signal to shut down server gracefully
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
<-sigchan
log.Printf("Shutting down the server\n")
server.Stop()
}


@ -2,6 +2,9 @@ package main
import ( import (
"log" "log"
config "openreplay/backend/internal/config/integrations"
"openreplay/backend/internal/integrations/clientManager"
"openreplay/backend/pkg/monitoring"
"time" "time"
"os" "os"
@ -9,23 +12,24 @@ import (
"syscall" "syscall"
"openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/intervals" "openreplay/backend/pkg/intervals"
"openreplay/backend/pkg/messages" "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue"
"openreplay/backend/pkg/token" "openreplay/backend/pkg/token"
"openreplay/backend/services/integrations/clientManager"
) )
//
func main() { func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) metrics := monitoring.New("integrations")
TOPIC_RAW_WEB := env.String("TOPIC_RAW_WEB")
POSTGRES_STRING := env.String("POSTGRES_STRING")
pg := postgres.NewConn(POSTGRES_STRING) log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := config.New()
pg := postgres.NewConn(cfg.PostgresURI, 0, 0, metrics)
defer pg.Close() defer pg.Close()
tokenizer := token.NewTokenizer(env.String("TOKEN_SECRET")) tokenizer := token.NewTokenizer(cfg.TokenSecret)
manager := clientManager.NewManager() manager := clientManager.NewManager()
@ -42,10 +46,10 @@ func main() {
} }
}) })
producer := queue.NewProducer() producer := queue.NewProducer(cfg.MessageSizeLimit)
defer producer.Close(15000) defer producer.Close(15000)
listener, err := postgres.NewIntegrationsListener(POSTGRES_STRING) listener, err := postgres.NewIntegrationsListener(cfg.PostgresURI)
if err != nil { if err != nil {
log.Printf("Postgres listener error: %v\n", err) log.Printf("Postgres listener error: %v\n", err)
log.Fatalf("Postgres listener error") log.Fatalf("Postgres listener error")
@ -70,7 +74,7 @@ func main() {
log.Printf("Requesting all...\n") log.Printf("Requesting all...\n")
manager.RequestAll() manager.RequestAll()
case event := <-manager.Events: case event := <-manager.Events:
log.Printf("New integration event: %+v\n", *event.RawErrorEvent) log.Printf("New integration event: %+v\n", *event.IntegrationEvent)
sessionID := event.SessionID sessionID := event.SessionID
if sessionID == 0 { if sessionID == 0 {
sessData, err := tokenizer.Parse(event.Token) sessData, err := tokenizer.Parse(event.Token)
@ -80,8 +84,7 @@ func main() {
} }
sessionID = sessData.ID sessionID = sessData.ID
} }
// TODO: send to ready-events topic. Otherwise it have to go through the events worker. producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(event.IntegrationEvent))
producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent))
case err := <-manager.Errors: case err := <-manager.Errors:
log.Printf("Integration error: %v\n", err) log.Printf("Integration error: %v\n", err)
case i := <-manager.RequestDataUpdates: case i := <-manager.RequestDataUpdates:

backend/cmd/sink/main.go (new file, 141 lines added)

@ -0,0 +1,141 @@
package main
import (
"context"
"encoding/binary"
"log"
"openreplay/backend/internal/sink/assetscache"
"openreplay/backend/internal/sink/oswriter"
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/monitoring"
"time"
"os"
"os/signal"
"syscall"
"openreplay/backend/internal/config/sink"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/url/assets"
)
func main() {
metrics := monitoring.New("sink")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := sink.New()
if _, err := os.Stat(cfg.FsDir); os.IsNotExist(err) {
log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err)
}
writer := oswriter.NewWriter(cfg.FsUlimit, cfg.FsDir)
producer := queue.NewProducer(cfg.MessageSizeLimit)
defer producer.Close(cfg.ProducerCloseTimeout)
rewriter := assets.NewRewriter(cfg.AssetsOrigin)
assetMessageHandler := assetscache.New(cfg, rewriter, producer)
counter := storage.NewLogCounter()
totalMessages, err := metrics.RegisterCounter("messages_total")
if err != nil {
log.Printf("can't create messages_total metric: %s", err)
}
savedMessages, err := metrics.RegisterCounter("messages_saved")
if err != nil {
log.Printf("can't create messages_saved metric: %s", err)
}
messageSize, err := metrics.RegisterHistogram("messages_size")
if err != nil {
log.Printf("can't create messages_size metric: %s", err)
}
consumer := queue.NewMessageConsumer(
cfg.GroupSink,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, message Message, _ *types.Meta) {
// Process assets
message = assetMessageHandler.ParseAssets(sessionID, message)
totalMessages.Add(context.Background(), 1)
// Filter message
typeID := message.TypeID()
// Send SessionEnd trigger to storage service
switch message.(type) {
case *SessionEnd:
if err := producer.Produce(cfg.TopicTrigger, sessionID, Encode(message)); err != nil {
log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, sessionID)
}
return
}
if !IsReplayerType(typeID) {
return
}
// If message timestamp is empty, use at least ts of session start
ts := message.Meta().Timestamp
if ts == 0 {
log.Printf("zero ts; sessID: %d, msg: %+v", sessionID, message)
} else {
// Log ts of last processed message
counter.Update(sessionID, time.UnixMilli(ts))
}
value := message.Encode()
var data []byte
if IsIOSType(typeID) {
data = value
} else {
data = make([]byte, len(value)+8)
copy(data[8:], value[:])
binary.LittleEndian.PutUint64(data[0:], message.Meta().Index)
}
if err := writer.Write(sessionID, data); err != nil {
log.Printf("Writer error: %v\n", err)
}
messageSize.Record(context.Background(), float64(len(data)))
savedMessages.Add(context.Background(), 1)
},
false,
cfg.MessageSizeLimit,
)
log.Printf("Sink service started\n")
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
tick := time.Tick(30 * time.Second)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
consumer.Close()
os.Exit(0)
case <-tick:
if err := writer.SyncAll(); err != nil {
log.Fatalf("Sync error: %v\n", err)
}
counter.Print()
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
default:
err := consumer.ConsumeNext()
if err != nil {
log.Fatalf("Error on consumption: %v", err)
}
}
}
}
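For replayer messages the sink prefixes each record with the message index as 8 little-endian bytes before handing it to the file writer, while iOS messages are written as-is. A minimal sketch, not part of this changeset, of how a reader could undo that framing; the helper name decodeRecord is an illustrative assumption.

```go
// Sketch only: undoing the 8-byte index prefix written by the sink above.
package main

import (
	"encoding/binary"
	"fmt"
)

// decodeRecord returns the message index stored via binary.LittleEndian.PutUint64
// and the encoded message bytes that follow it.
func decodeRecord(data []byte) (index uint64, payload []byte, err error) {
	if len(data) < 8 {
		return 0, nil, fmt.Errorf("record too short: %d bytes", len(data))
	}
	return binary.LittleEndian.Uint64(data[:8]), data[8:], nil
}

func main() {
	// Mirror the framing done in the sink handler: 8-byte index, then the message.
	value := []byte("encoded-message")
	data := make([]byte, len(value)+8)
	copy(data[8:], value)
	binary.LittleEndian.PutUint64(data[:8], 42)

	idx, payload, err := decodeRecord(data)
	if err != nil {
		panic(err)
	}
	fmt.Println(idx, string(payload)) // 42 encoded-message
}
```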

backend/cmd/storage/main.go Normal file

@ -0,0 +1,74 @@
package main
import (
"log"
"openreplay/backend/pkg/monitoring"
"os"
"os/signal"
"strconv"
"syscall"
"time"
config "openreplay/backend/internal/config/storage"
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
s3storage "openreplay/backend/pkg/storage"
)
func main() {
metrics := monitoring.New("storage")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
cfg := config.New()
s3 := s3storage.NewS3(cfg.S3Region, cfg.S3Bucket)
srv, err := storage.New(cfg, s3, metrics)
if err != nil {
log.Printf("can't init storage service: %s", err)
return
}
counter := storage.NewLogCounter()
consumer := queue.NewMessageConsumer(
cfg.GroupStorage,
[]string{
cfg.TopicTrigger,
},
func(sessionID uint64, msg messages.Message, meta *types.Meta) {
switch msg.(type) {
case *messages.SessionEnd:
srv.UploadKey(strconv.FormatUint(sessionID, 10), 5)
// Log timestamp of last processed session
counter.Update(sessionID, time.UnixMilli(meta.Timestamp))
}
},
true,
cfg.MessageSizeLimit,
)
log.Printf("Storage service started\n")
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
counterTick := time.Tick(time.Second * 30)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
consumer.Close()
os.Exit(0)
case <-counterTick:
go counter.Print()
default:
err := consumer.ConsumeNext()
if err != nil {
log.Fatalf("Error on consumption: %v", err)
}
}
}
}

14
backend/development.md Normal file

@ -0,0 +1,14 @@
### Prerequisites
- [Vagrant](../scripts/vagrant/README.md)
### Building and deploying locally
```bash
cd openreplay-contributions
vagrant ssh
cd openreplay-dev/openreplay/scripts/helmcharts
# For complete list of options
# bash local_deploy.sh help
bash local_deploy.sh <worker name>
```
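For example, `bash local_deploy.sh sink` should rebuild and redeploy the sink worker added in this changeset; the worker name is assumed to match the service directory under `backend/cmd/`.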

backend/go.mod

@@ -4,9 +4,9 @@ go 1.18
 require (
 	cloud.google.com/go/logging v1.4.2
-	github.com/ClickHouse/clickhouse-go v1.4.3
 	github.com/aws/aws-sdk-go v1.35.23
 	github.com/btcsuite/btcutil v1.0.2
+	github.com/caarlos0/env/v6 v6.9.3
 	github.com/elastic/go-elasticsearch/v7 v7.13.1
 	github.com/go-redis/redis v6.15.9+incompatible
 	github.com/google/uuid v1.1.2
@@ -17,21 +17,30 @@ require (
 	github.com/klauspost/pgzip v1.2.5
 	github.com/oschwald/maxminddb-golang v1.7.0
 	github.com/pkg/errors v0.9.1
+	github.com/sethvargo/go-envconfig v0.7.0
 	github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
 	github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe
-	golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420
-	google.golang.org/api v0.50.0
-	gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0
+	go.opentelemetry.io/otel v1.7.0
+	go.opentelemetry.io/otel/exporters/prometheus v0.30.0
+	go.opentelemetry.io/otel/metric v0.30.0
+	go.opentelemetry.io/otel/sdk/metric v0.30.0
+	golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2
+	google.golang.org/api v0.81.0
 )
 require (
-	cloud.google.com/go v0.84.0 // indirect
-	github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 // indirect
-	github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
-	github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
+	cloud.google.com/go v0.100.2 // indirect
+	cloud.google.com/go/compute v1.6.1 // indirect
+	cloud.google.com/go/iam v0.3.0 // indirect
+	cloud.google.com/go/storage v1.14.0 // indirect
+	github.com/beorn7/perks v1.0.1 // indirect
+	github.com/cespare/xxhash/v2 v2.1.2 // indirect
+	github.com/go-logr/logr v1.2.3 // indirect
+	github.com/go-logr/stdr v1.2.2 // indirect
+	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
 	github.com/golang/protobuf v1.5.2 // indirect
-	github.com/google/go-cmp v0.5.6 // indirect
-	github.com/googleapis/gax-go/v2 v2.0.5 // indirect
+	github.com/google/go-cmp v0.5.8 // indirect
+	github.com/googleapis/gax-go/v2 v2.4.0 // indirect
 	github.com/jackc/chunkreader/v2 v2.0.1 // indirect
 	github.com/jackc/pgio v1.0.0 // indirect
 	github.com/jackc/pgpassfile v1.0.0 // indirect
@@ -40,21 +49,26 @@ require (
 	github.com/jackc/pgtype v1.3.0 // indirect
 	github.com/jackc/puddle v1.1.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
+	github.com/jstemmer/go-junit-report v0.9.1 // indirect
 	github.com/klauspost/compress v1.11.9 // indirect
+	github.com/kr/pretty v0.3.0 // indirect
+	github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
+	github.com/prometheus/client_golang v1.12.1 // indirect
+	github.com/prometheus/client_model v0.2.0 // indirect
+	github.com/prometheus/common v0.32.1 // indirect
+	github.com/prometheus/procfs v0.7.3 // indirect
 	go.opencensus.io v0.23.0 // indirect
-	golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 // indirect
-	golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect
-	golang.org/x/mod v0.4.2 // indirect
-	golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect
-	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
-	golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 // indirect
-	golang.org/x/text v0.3.6 // indirect
-	golang.org/x/tools v0.1.4 // indirect
-	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	go.opentelemetry.io/otel/sdk v1.7.0 // indirect
+	go.opentelemetry.io/otel/trace v1.7.0 // indirect
+	golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect
+	golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect
+	golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect
+	golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect
+	golang.org/x/text v0.3.7 // indirect
+	golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect
 	google.golang.org/appengine v1.6.7 // indirect
-	google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84 // indirect
-	google.golang.org/grpc v1.38.0 // indirect
-	google.golang.org/protobuf v1.26.0 // indirect
-	gopkg.in/yaml.v2 v2.2.8 // indirect
+	google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect
+	google.golang.org/grpc v1.46.2 // indirect
+	google.golang.org/protobuf v1.28.0 // indirect
+	gopkg.in/yaml.v2 v2.4.0 // indirect
+	gopkg.in/yaml.v3 v3.0.0 // indirect
 )

backend/go.sum

@ -15,20 +15,36 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY=
cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY=
cloud.google.com/go v0.84.0 h1:hVhK90DwCdOAYGME/FJd9vNIZye9HBR6Yy3fu4js3N8=
cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM=
cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY=
cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=
cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=
cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=
cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc=
cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA=
cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y=
cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow=
cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM=
cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M=
cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s=
cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc=
cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc=
cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY=
cloud.google.com/go/logging v1.4.2 h1:Mu2Q75VBDQlW1HlBMjTX4X84UFR73G1TiLlRYc/b7tA= cloud.google.com/go/logging v1.4.2 h1:Mu2Q75VBDQlW1HlBMjTX4X84UFR73G1TiLlRYc/b7tA=
cloud.google.com/go/logging v1.4.2/go.mod h1:jco9QZSx8HiVVqLJReq7z7bVdj0P1Jb9PDFs63T+axo= cloud.google.com/go/logging v1.4.2/go.mod h1:jco9QZSx8HiVVqLJReq7z7bVdj0P1Jb9PDFs63T+axo=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
@ -39,18 +55,28 @@ cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiy
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09bA=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.14.0 h1:6RRlFMv1omScs6iq2hfE3IvgE+l6RfJPampq8UZc5TU=
cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/aws/aws-sdk-go v1.35.23 h1:SCP0d0XvyJTDmfnHEQPvBaYi3kea1VNUo7uQmkVgFts= github.com/aws/aws-sdk-go v1.35.23 h1:SCP0d0XvyJTDmfnHEQPvBaYi3kea1VNUo7uQmkVgFts=
github.com/aws/aws-sdk-go v1.35.23/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= github.com/aws/aws-sdk-go v1.35.23/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k=
github.com/bkaradzic/go-lz4 v1.0.0 h1:RXc4wYsyz985CkXXeX04y4VnZFGG8Rd43pRaHsOXAKk=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4=
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA=
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg=
@ -61,23 +87,32 @@ github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVa
github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY=
github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
github.com/caarlos0/env/v6 v6.9.3 h1:Tyg69hoVXDnpO5Qvpsu8EoquarbPyQb+YwExWHP8wWU=
github.com/caarlos0/env/v6 v6.9.3/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 h1:F1EaeKL/ta07PY/k9Os/UFtwERei2/XzGemhpGnBKNg=
github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -90,22 +125,38 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE= github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@ -114,6 +165,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@ -146,8 +198,11 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
@ -161,17 +216,26 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=
github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=
github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM=
github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM=
github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk=
github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
@ -226,11 +290,16 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
github.com/klauspost/compress v1.11.9 h1:5OCMOdde1TCT2sookEuVeEZzA8bmRSFV3AwPDZAG8AA= github.com/klauspost/compress v1.11.9 h1:5OCMOdde1TCT2sookEuVeEZzA8bmRSFV3AwPDZAG8AA=
@ -239,12 +308,16 @@ github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
@ -255,7 +328,15 @@ github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs= github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
@ -263,23 +344,51 @@ github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/oschwald/maxminddb-golang v1.7.0 h1:JmU4Q1WBv5Q+2KZy5xJI+98aUwTIrPPxZUkd5Cwr8Zc= github.com/oschwald/maxminddb-golang v1.7.0 h1:JmU4Q1WBv5Q+2KZy5xJI+98aUwTIrPPxZUkd5Cwr8Zc=
github.com/oschwald/maxminddb-golang v1.7.0/go.mod h1:RXZtst0N6+FY/3qCNmZMBApR19cdQj43/NM9VkrNAis= github.com/oschwald/maxminddb-golang v1.7.0/go.mod h1:RXZtst0N6+FY/3qCNmZMBApR19cdQj43/NM9VkrNAis=
github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk=
github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4=
github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sethvargo/go-envconfig v0.7.0 h1:P/ljQXSRjgAgsnIripHs53Jg/uNVXu2FYQ9yLSDappA=
github.com/sethvargo/go-envconfig v0.7.0/go.mod h1:00S1FAhRUuTNJazWBWcJGvEHOM+NO6DhoRMAOX7FY5o=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
@ -287,8 +396,10 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce h1:fb190+cK2Xz/dvi9Hv8eCYJYvIGUTN2/KLq1pT6CjEc= github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce h1:fb190+cK2Xz/dvi9Hv8eCYJYvIGUTN2/KLq1pT6CjEc=
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4=
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe h1:aj/vX5epIlQQBEocKoM9nSAiNpakdQzElc8SaRFPu+I= github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe h1:aj/vX5epIlQQBEocKoM9nSAiNpakdQzElc8SaRFPu+I=
@ -307,12 +418,26 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM=
go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk=
go.opentelemetry.io/otel/exporters/prometheus v0.30.0 h1:YXo5ZY5nofaEYMCMTTMaRH2cLDZB8+0UGuk5RwMfIo0=
go.opentelemetry.io/otel/exporters/prometheus v0.30.0/go.mod h1:qN5feW+0/d661KDtJuATEmHtw5bKBK7NSvNEP927zSs=
go.opentelemetry.io/otel/metric v0.30.0 h1:Hs8eQZ8aQgs0U49diZoaS6Uaxw3+bBE3lcMUKBFIk3c=
go.opentelemetry.io/otel/metric v0.30.0/go.mod h1:/ShZ7+TS4dHzDFmfi1kSXMhMVubNoP0oIaBp70J6UXU=
go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0=
go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU=
go.opentelemetry.io/otel/sdk/metric v0.30.0 h1:XTqQ4y3erR2Oj8xSAOL5ovO5011ch2ELg51z4fVkpME=
go.opentelemetry.io/otel/sdk/metric v0.30.0/go.mod h1:8AKFRi5HyvTR0RRty3paN1aMC9HMT+NzcEhw/BLkLX8=
go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o=
go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@ -321,8 +446,9 @@ golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 h1:kUhD7nTDoI3fVd9G4ORWrbV5NY0liEs/Jg2pv5f+bBA=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -346,7 +472,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug=
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
@ -358,11 +483,11 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@ -370,6 +495,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -392,12 +518,20 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 h1:a8jGStKg0XqKDlKqjLrXn0ioF5MH36pT7Z0BRTqLhbk=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -411,8 +545,14 @@ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8=
golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE=
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -423,11 +563,13 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 h1:w8s32wxx3sY+OjLlv9qltkLU5yvJzxjjgiHWLjdIcw4=
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -445,6 +587,7 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191224085550-c709ea063b76/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191224085550-c709ea063b76/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -457,6 +600,8 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -464,19 +609,40 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 h1:RqytpXGR1iVNX7psjB3ff8y7sNFinVFvkx1c8SjBkio= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -484,8 +650,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -536,19 +703,22 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4 h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618=
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@ -573,8 +743,22 @@ google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk
google.golang.org/api v0.46.0/go.mod h1:ceL4oozhkAiTID8XMmJBsIxID/9wMXJVVFXPg4ylg3I= google.golang.org/api v0.46.0/go.mod h1:ceL4oozhkAiTID8XMmJBsIxID/9wMXJVVFXPg4ylg3I=
google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo=
google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4=
google.golang.org/api v0.50.0 h1:LX7NFCFYOHzr7WHaYiRUpeipZe9o5L8T+2F4Z798VDw=
google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw=
google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU=
google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k=
google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI=
google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I=
google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo=
google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g=
google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA=
google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8=
google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs=
google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw=
google.golang.org/api v0.81.0 h1:o8WF5AvfidafWbFjsRyupxyEQJNUWxLZJCK5NXrxZZ8=
google.golang.org/api v0.81.0/go.mod h1:FA6Mb/bZxj706H2j+j2d6mHEEaHBmbbWnkfvmorOCko=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@ -606,6 +790,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
@ -617,7 +802,9 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
@ -628,8 +815,37 @@ google.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQ
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84 h1:R1r5J0u6Cx+RNl/6mezTw6oA14cmKC96FeUwL6A9bd4=
google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=
google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=
google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E=
google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd h1:e0TwkXOdbnH/1x5rc5MZ/VYyiZ4v+RdVfrGMqEwT68I=
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -643,6 +859,7 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
@ -650,8 +867,16 @@ google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.38.0 h1:/9BgsAsa5nWe26HqOlvlgJnqBuktYOLCgjCPqsa56W0=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ=
google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
@ -664,13 +889,15 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0 h1:+RlmciBLDd/XwM1iudiG3HtCg45purnsOxEoY/+JZdQ= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0/go.mod h1:ZdI3yfYmdNSLQPNCpO1y00EHyWaHG5EnQEyL/ntAegY= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
@ -679,10 +906,16 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA=
gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=


@ -1,17 +1,15 @@
-package main
+package assets
import (
    "encoding/json"
    "strings"
)
type frame struct {
    FileName string `json:"fileName"`
}
-func extractJSExceptionSources(payload *string) ([]string, error) {
+func ExtractJSExceptionSources(payload *string) ([]string, error) {
    var frameList []frame
    err := json.Unmarshal([]byte(*payload), &frameList)
    if err != nil {
@ -25,8 +23,8 @@ func extractJSExceptionSources(payload *string) ([]string, error) {
        fn := strings.Split(f.FileName, "?")[0]
        if strings.HasPrefix(fn, "http") && !presentedFileName[fn] {
            fileNamesList = append(fileNamesList, f.FileName)
            presentedFileName[fn] = true
        }
    }
    return fileNamesList, nil
}
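For reference, a minimal sketch of calling the now-exported helper above from another service; the import path and the stack-trace payload are hypothetical and only mirror the frame/fileName shape the function expects:

package main

import (
    "fmt"

    "openreplay/backend/internal/assets" // assumed import path for the assets package above
)

func main() {
    // Hypothetical JS exception payload: a JSON array of stack frames with fileName fields.
    payload := `[{"fileName":"https://cdn.example.com/app.js?v=1"},{"fileName":"https://cdn.example.com/app.js?v=2"}]`
    sources, err := assets.ExtractJSExceptionSources(&payload)
    if err != nil {
        fmt.Println("can't parse payload:", err)
        return
    }
    // Both frames point to the same file once the query string is stripped,
    // so only the first fileName is kept.
    fmt.Println(sources)
}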


@ -0,0 +1,22 @@
package assets

import (
    "openreplay/backend/internal/config/common"
    "openreplay/backend/internal/config/configurator"
)

type Config struct {
    common.Config
    GroupCache string `env:"GROUP_CACHE,required"`
    TopicCache string `env:"TOPIC_CACHE,required"`
    AWSRegion string `env:"AWS_REGION,required"`
    S3BucketAssets string `env:"S3_BUCKET_ASSETS,required"`
    AssetsOrigin string `env:"ASSETS_ORIGIN,required"`
    AssetsSizeLimit int `env:"ASSETS_SIZE_LIMIT,required"`
}

func New() *Config {
    cfg := &Config{}
    configurator.Process(cfg)
    return cfg
}


@ -0,0 +1,14 @@
package common

type Config struct {
    ConfigFilePath string `env:"CONFIG_FILE_PATH"`
    MessageSizeLimit int `env:"QUEUE_MESSAGE_SIZE_LIMIT,default=1048576"`
}

type Configer interface {
    GetConfigPath() string
}

func (c *Config) GetConfigPath() string {
    return c.ConfigFilePath
}


@ -0,0 +1,104 @@
package configurator

import (
    "context"
    "fmt"
    "io"
    "log"
    "os"
    "reflect"
    "strconv"
    "strings"
    "time"

    "github.com/sethvargo/go-envconfig"

    "openreplay/backend/internal/config/common"
)

func readFile(path string) (map[string]string, error) {
    if path == "" {
        return nil, fmt.Errorf("file path is empty")
    }
    file, err := os.Open(path)
    if err != nil {
        return nil, fmt.Errorf("can't open file: %s", err)
    }
    defer file.Close()
    data, err := io.ReadAll(file)
    if err != nil {
        return nil, fmt.Errorf("can't read file: %s", err)
    }
    log.Println(data)
    res := make(map[string]string)
    lines := strings.Split(string(data), "\n")
    for _, line := range lines {
        env := strings.Split(line, "=")
        if len(env) < 2 {
            // Guard: skip blank or malformed lines instead of panicking on a missing "=".
            continue
        }
        res[env[0]] = env[1]
    }
    return res, nil
}
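The file that readFile parses is a plain KEY=VALUE list, one variable per line, using the same names as the env tags on the config structs in this change; only the text between the first and second "=" on a line is kept as the value. A hypothetical example:

HTTP_PORT=8080
LOG_QUEUE_STATS_INTERVAL_SEC=60
CACHE_ASSETS=true
COMMIT_BATCH_TIMEOUT=30s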

func parseFile(a interface{}, path string) {
    envs, err := readFile(path)
    if err != nil {
        log.Printf("can't parse config file: %s", err)
        return
    }
    val := reflect.ValueOf(a).Elem()
    for i := 0; i < val.NumField(); i++ {
        envName := val.Type().Field(i).Tag.Get("env")
        if envName == "" {
            continue
        }
        envName = strings.Split(envName, ",")[0]
        if envName == "" {
            continue
        }
        fmt.Println(envName, val.Type().Field(i).Type.String())
        if value, ok := envs[envName]; ok {
            switch val.Type().Field(i).Type.String() {
            case "string":
                val.Field(i).SetString(value)
            case "int", "int8", "int16", "int32", "int64":
                intValue, err := strconv.Atoi(value)
                if err != nil {
                    log.Printf("can't parse int value: %s", err)
                    continue
                }
                val.Field(i).SetInt(int64(intValue))
            case "uint", "uint8", "uint16", "uint32", "uint64":
                uintValue, err := strconv.Atoi(value)
                if err != nil {
                    log.Printf("can't parse uint value: %s", err)
                    continue
                }
                val.Field(i).SetUint(uint64(uintValue))
            case "bool":
                boolValue, err := strconv.ParseBool(value)
                if err != nil {
                    log.Printf("can't parse bool value: %s", err)
                }
                val.Field(i).SetBool(boolValue)
            case "time.Duration":
                d, err := time.ParseDuration(value)
                if err != nil {
                    log.Printf("can't parse time.Duration value: %s", err)
                    continue
                }
                val.Field(i).SetInt(int64(d))
            default:
                log.Println("unknown config type: ", val.Type().Field(i).Type.String())
            }
        }
    }
}

func Process(cfg common.Configer) {
    ctx := context.Background()
    if err := envconfig.Process(ctx, cfg); err != nil {
        log.Println("env process err: ", err)
        //log.Fatal(err)
    }
    parseFile(cfg, cfg.GetConfigPath())
}
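A minimal usage sketch of the loader above, assuming only the two import paths shown in this change; ServiceConfig is hypothetical and simply mirrors the pattern the service-specific packages below follow (embed common.Config, tag fields with env, pass a pointer to configurator.Process):

package main

import (
    "fmt"
    "os"
    "time"

    "openreplay/backend/internal/config/common"
    "openreplay/backend/internal/config/configurator"
)

// Hypothetical config; the real ones are defined in the packages that follow.
type ServiceConfig struct {
    common.Config
    HTTPPort string `env:"HTTP_PORT,default=8080"`
    FlushTimeout time.Duration `env:"FLUSH_TIMEOUT,default=5s"`
}

func main() {
    // Environment variables are read first; if CONFIG_FILE_PATH is set,
    // the KEY=VALUE file it points to overrides matching fields afterwards.
    os.Setenv("HTTP_PORT", "9000")
    cfg := &ServiceConfig{}
    configurator.Process(cfg)
    fmt.Println(cfg.HTTPPort, cfg.FlushTimeout) // 9000 5s
}

Each New() constructor in the packages below performs this same flow for its own Config struct.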


@ -0,0 +1,26 @@
package db

import (
    "openreplay/backend/internal/config/common"
    "openreplay/backend/internal/config/configurator"
    "time"
)

type Config struct {
    common.Config
    Postgres string `env:"POSTGRES_STRING,required"`
    ProjectExpirationTimeoutMs int64 `env:"PROJECT_EXPIRATION_TIMEOUT_MS,default=1200000"`
    LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
    GroupDB string `env:"GROUP_DB,required"`
    TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
    TopicAnalytics string `env:"TOPIC_ANALYTICS,required"`
    CommitBatchTimeout time.Duration `env:"COMMIT_BATCH_TIMEOUT,default=15s"`
    BatchQueueLimit int `env:"DB_BATCH_QUEUE_LIMIT,required"`
    BatchSizeLimit int `env:"DB_BATCH_SIZE_LIMIT,required"`
}

func New() *Config {
    cfg := &Config{}
    configurator.Process(cfg)
    return cfg
}


@ -0,0 +1,23 @@
package ender

import (
    "openreplay/backend/internal/config/common"
    "openreplay/backend/internal/config/configurator"
)

type Config struct {
    common.Config
    Postgres string `env:"POSTGRES_STRING,required"`
    ProjectExpirationTimeoutMs int64 `env:"PROJECT_EXPIRATION_TIMEOUT_MS,default=1200000"`
    GroupEnder string `env:"GROUP_ENDER,required"`
    LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
    TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
    ProducerTimeout int `env:"PRODUCER_TIMEOUT,default=2000"`
    PartitionsNumber int `env:"PARTITIONS_NUMBER,required"`
}

func New() *Config {
    cfg := &Config{}
    configurator.Process(cfg)
    return cfg
}


@ -0,0 +1,22 @@
package heuristics
import (
"openreplay/backend/internal/config/common"
"openreplay/backend/internal/config/configurator"
)
type Config struct {
common.Config
GroupHeuristics string `env:"GROUP_HEURISTICS,required"`
TopicAnalytics string `env:"TOPIC_ANALYTICS,required"`
LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
TopicRawIOS string `env:"TOPIC_RAW_IOS,required"`
ProducerTimeout int `env:"PRODUCER_TIMEOUT,default=2000"`
}
func New() *Config {
cfg := &Config{}
configurator.Process(cfg)
return cfg
}


@ -0,0 +1,33 @@
package http
import (
"openreplay/backend/internal/config/common"
"openreplay/backend/internal/config/configurator"
"openreplay/backend/pkg/env"
"time"
)
type Config struct {
common.Config
HTTPHost string `env:"HTTP_HOST,default="`
HTTPPort string `env:"HTTP_PORT,required"`
HTTPTimeout time.Duration `env:"HTTP_TIMEOUT,default=60s"`
TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
TopicRawIOS string `env:"TOPIC_RAW_IOS,required"`
BeaconSizeLimit int64 `env:"BEACON_SIZE_LIMIT,required"`
JsonSizeLimit int64 `env:"JSON_SIZE_LIMIT,default=1000"`
FileSizeLimit int64 `env:"FILE_SIZE_LIMIT,default=10000000"`
AWSRegion string `env:"AWS_REGION,required"`
S3BucketIOSImages string `env:"S3_BUCKET_IOS_IMAGES,required"`
Postgres string `env:"POSTGRES_STRING,required"`
TokenSecret string `env:"TOKEN_SECRET,required"`
UAParserFile string `env:"UAPARSER_FILE,required"`
MaxMinDBFile string `env:"MAXMINDDB_FILE,required"`
WorkerID uint16
}
func New() *Config {
cfg := &Config{WorkerID: env.WorkerID()}
configurator.Process(cfg)
return cfg
}


@ -0,0 +1,19 @@
package integrations
import (
"openreplay/backend/internal/config/common"
"openreplay/backend/internal/config/configurator"
)
type Config struct {
common.Config
TopicAnalytics string `env:"TOPIC_ANALYTICS,required"`
PostgresURI string `env:"POSTGRES_STRING,required"`
TokenSecret string `env:"TOKEN_SECRET,required"`
}
func New() *Config {
cfg := &Config{}
configurator.Process(cfg)
return cfg
}


@ -0,0 +1,26 @@
package sink
import (
"openreplay/backend/internal/config/common"
"openreplay/backend/internal/config/configurator"
)
type Config struct {
common.Config
FsDir string `env:"FS_DIR,required"`
FsUlimit uint16 `env:"FS_ULIMIT,required"`
GroupSink string `env:"GROUP_SINK,required"`
TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
TopicRawIOS string `env:"TOPIC_RAW_IOS,required"`
TopicCache string `env:"TOPIC_CACHE,required"`
TopicTrigger string `env:"TOPIC_TRIGGER,required"`
CacheAssets bool `env:"CACHE_ASSETS,required"`
AssetsOrigin string `env:"ASSETS_ORIGIN,required"`
ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"`
}
func New() *Config {
cfg := &Config{}
configurator.Process(cfg)
return cfg
}


@ -0,0 +1,26 @@
package storage
import (
"openreplay/backend/internal/config/common"
"openreplay/backend/internal/config/configurator"
"time"
)
type Config struct {
common.Config
S3Region string `env:"AWS_REGION_WEB,required"`
S3Bucket string `env:"S3_BUCKET_WEB,required"`
FSDir string `env:"FS_DIR,required"`
FSCleanHRS int `env:"FS_CLEAN_HRS,required"`
FileSplitSize int `env:"FILE_SPLIT_SIZE,required"`
RetryTimeout time.Duration `env:"RETRY_TIMEOUT,default=2m"`
GroupStorage string `env:"GROUP_STORAGE,required"`
TopicTrigger string `env:"TOPIC_TRIGGER,required"`
DeleteTimeout time.Duration `env:"DELETE_TIMEOUT,default=48h"`
}
func New() *Config {
cfg := &Config{}
configurator.Process(cfg)
return cfg
}


@ -0,0 +1,80 @@
package datasaver
import (
"fmt"
. "openreplay/backend/pkg/messages"
)
func (mi *Saver) InsertMessage(sessionID uint64, msg Message) error {
switch m := msg.(type) {
// Common
case *Metadata:
if err := mi.pg.InsertMetadata(sessionID, m); err != nil {
return fmt.Errorf("insert metadata err: %s", err)
}
return nil
case *IssueEvent:
return mi.pg.InsertIssueEvent(sessionID, m)
//TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others
// Web
case *SessionStart:
return mi.pg.HandleWebSessionStart(sessionID, m)
case *SessionEnd:
return mi.pg.HandleWebSessionEnd(sessionID, m)
case *UserID:
return mi.pg.InsertWebUserID(sessionID, m)
case *UserAnonymousID:
return mi.pg.InsertWebUserAnonymousID(sessionID, m)
case *CustomEvent:
return mi.pg.InsertWebCustomEvent(sessionID, m)
case *ClickEvent:
return mi.pg.InsertWebClickEvent(sessionID, m)
case *InputEvent:
return mi.pg.InsertWebInputEvent(sessionID, m)
// Unique Web messages
case *PageEvent:
return mi.pg.InsertWebPageEvent(sessionID, m)
case *ErrorEvent:
return mi.pg.InsertWebErrorEvent(sessionID, m)
case *FetchEvent:
return mi.pg.InsertWebFetchEvent(sessionID, m)
case *GraphQLEvent:
return mi.pg.InsertWebGraphQLEvent(sessionID, m)
case *IntegrationEvent:
return mi.pg.InsertWebErrorEvent(sessionID, &ErrorEvent{
MessageID: m.Meta().Index,
Timestamp: m.Timestamp,
Source: m.Source,
Name: m.Name,
Message: m.Message,
Payload: m.Payload,
})
// IOS
case *IOSSessionStart:
return mi.pg.InsertIOSSessionStart(sessionID, m)
case *IOSSessionEnd:
return mi.pg.InsertIOSSessionEnd(sessionID, m)
case *IOSUserID:
return mi.pg.InsertIOSUserID(sessionID, m)
case *IOSUserAnonymousID:
return mi.pg.InsertIOSUserAnonymousID(sessionID, m)
case *IOSCustomEvent:
return mi.pg.InsertIOSCustomEvent(sessionID, m)
case *IOSClickEvent:
return mi.pg.InsertIOSClickEvent(sessionID, m)
case *IOSInputEvent:
return mi.pg.InsertIOSInputEvent(sessionID, m)
// Unique IOS messages
case *IOSNetworkCall:
return mi.pg.InsertIOSNetworkCall(sessionID, m)
case *IOSScreenEnter:
return mi.pg.InsertIOSScreenEnter(sessionID, m)
case *IOSCrash:
return mi.pg.InsertIOSCrash(sessionID, m)
}
return nil // "Not implemented"
}


@ -0,0 +1,11 @@
package datasaver
import "openreplay/backend/pkg/db/cache"
type Saver struct {
pg *cache.PGCache
}
func New(pg *cache.PGCache) *Saver {
return &Saver{pg: pg}
}


@ -0,0 +1,27 @@
package datasaver
import (
. "openreplay/backend/pkg/db/types"
. "openreplay/backend/pkg/messages"
)
func (si *Saver) InitStats() {
// noop
}
func (si *Saver) InsertStats(session *Session, msg Message) error {
switch m := msg.(type) {
// Web
case *PerformanceTrackAggr:
return si.pg.InsertWebStatsPerformance(session.SessionID, m)
case *ResourceEvent:
return si.pg.InsertWebStatsResourceEvent(session.SessionID, m)
case *LongTask:
return si.pg.InsertWebStatsLongtask(session.SessionID, m)
}
return nil
}
func (si *Saver) CommitStats() error {
return nil
}


@ -0,0 +1,138 @@
package ios
import (
"strings"
)
func MapIOSDevice(identifier string) string {
switch identifier {
case "iPod5,1":
return "iPod touch (5th generation)"
case "iPod7,1":
return "iPod touch (6th generation)"
case "iPod9,1":
return "iPod touch (7th generation)"
case "iPhone3,1", "iPhone3,2", "iPhone3,3":
return "iPhone 4"
case "iPhone4,1":
return "iPhone 4s"
case "iPhone5,1", "iPhone5,2":
return "iPhone 5"
case "iPhone5,3", "iPhone5,4":
return "iPhone 5c"
case "iPhone6,1", "iPhone6,2":
return "iPhone 5s"
case "iPhone7,2":
return "iPhone 6"
case "iPhone7,1":
return "iPhone 6 Plus"
case "iPhone8,1":
return "iPhone 6s"
case "iPhone8,2":
return "iPhone 6s Plus"
case "iPhone8,4":
return "iPhone SE"
case "iPhone9,1", "iPhone9,3":
return "iPhone 7"
case "iPhone9,2", "iPhone9,4":
return "iPhone 7 Plus"
case "iPhone10,1", "iPhone10,4":
return "iPhone 8"
case "iPhone10,2", "iPhone10,5":
return "iPhone 8 Plus"
case "iPhone10,3", "iPhone10,6":
return "iPhone X"
case "iPhone11,2":
return "iPhone XS"
case "iPhone11,4", "iPhone11,6":
return "iPhone XS Max"
case "iPhone11,8":
return "iPhone XR"
case "iPhone12,1":
return "iPhone 11"
case "iPhone12,3":
return "iPhone 11 Pro"
case "iPhone12,5":
return "iPhone 11 Pro Max"
case "iPhone12,8":
return "iPhone SE (2nd generation)"
case "iPhone13,1":
return "iPhone 12 mini"
case "iPhone13,2":
return "iPhone 12"
case "iPhone13,3":
return "iPhone 12 Pro"
case "iPhone13,4":
return "iPhone 12 Pro Max"
case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4":
return "iPad 2"
case "iPad3,1", "iPad3,2", "iPad3,3":
return "iPad (3rd generation)"
case "iPad3,4", "iPad3,5", "iPad3,6":
return "iPad (4th generation)"
case "iPad6,11", "iPad6,12":
return "iPad (5th generation)"
case "iPad7,5", "iPad7,6":
return "iPad (6th generation)"
case "iPad7,11", "iPad7,12":
return "iPad (7th generation)"
case "iPad11,6", "iPad11,7":
return "iPad (8th generation)"
case "iPad4,1", "iPad4,2", "iPad4,3":
return "iPad Air"
case "iPad5,3", "iPad5,4":
return "iPad Air 2"
case "iPad11,3", "iPad11,4":
return "iPad Air (3rd generation)"
case "iPad13,1", "iPad13,2":
return "iPad Air (4th generation)"
case "iPad2,5", "iPad2,6", "iPad2,7":
return "iPad mini"
case "iPad4,4", "iPad4,5", "iPad4,6":
return "iPad mini 2"
case "iPad4,7", "iPad4,8", "iPad4,9":
return "iPad mini 3"
case "iPad5,1", "iPad5,2":
return "iPad mini 4"
case "iPad11,1", "iPad11,2":
return "iPad mini (5th generation)"
case "iPad6,3", "iPad6,4":
return "iPad Pro (9.7-inch)"
case "iPad7,3", "iPad7,4":
return "iPad Pro (10.5-inch)"
case "iPad8,1", "iPad8,2", "iPad8,3", "iPad8,4":
return "iPad Pro (11-inch) (1st generation)"
case "iPad8,9", "iPad8,10":
return "iPad Pro (11-inch) (2nd generation)"
case "iPad6,7", "iPad6,8":
return "iPad Pro (12.9-inch) (1st generation)"
case "iPad7,1", "iPad7,2":
return "iPad Pro (12.9-inch) (2nd generation)"
case "iPad8,5", "iPad8,6", "iPad8,7", "iPad8,8":
return "iPad Pro (12.9-inch) (3rd generation)"
case "iPad8,11", "iPad8,12":
return "iPad Pro (12.9-inch) (4th generation)"
case "AppleTV5,3":
return "Apple TV"
case "AppleTV6,2":
return "Apple TV 4K"
case "AudioAccessory1,1":
return "HomePod"
case "AudioAccessory5,1":
return "HomePod mini"
case "i386", "x86_64":
return "Simulator"
default:
return identifier
}
}
func GetIOSDeviceType(identifier string) string {
if strings.Contains(identifier, "iPhone") {
return "mobile" //"phone"
}
if strings.Contains(identifier, "iPad") {
return "tablet"
}
return "other"
}
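
A small usage sketch of the two helpers above (the caller and the sample identifiers are only illustrative; the import path comes from handlers-ios.go in this diff):

package main // hypothetical caller, not part of the diff

import (
    "fmt"

    "openreplay/backend/internal/http/ios"
)

func main() {
    fmt.Println(ios.MapIOSDevice("iPhone12,1"))      // iPhone 11
    fmt.Println(ios.GetIOSDeviceType("iPhone12,1"))  // mobile
    fmt.Println(ios.MapIOSDevice("iPhone99,9"))      // unknown identifiers are returned as-is
}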


@ -0,0 +1,172 @@
package router
import (
"encoding/json"
"errors"
"log"
"math/rand"
"net/http"
"openreplay/backend/internal/http/ios"
"openreplay/backend/internal/http/uuid"
"strconv"
"time"
"openreplay/backend/pkg/db/postgres"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/token"
)
func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
req := &StartIOSSessionRequest{}
if r.Body == nil {
ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"))
return
}
body := http.MaxBytesReader(w, r.Body, e.cfg.JsonSizeLimit)
defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
ResponseWithError(w, http.StatusBadRequest, err)
return
}
if req.ProjectKey == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := e.services.Database.GetProjectByKey(*req.ProjectKey)
if err != nil {
if postgres.IsNoRowsErr(err) {
ResponseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
} else {
ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := uuid.GetUUID(req.UserUUID)
tokenData, err := e.services.Tokenizer.Parse(req.Token)
if err != nil { // Starting the new one
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
ResponseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
ua := e.services.UaParser.ParseFromHTTPRequest(r)
if ua == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
sessionID, err := e.services.Flaker.Compose(uint64(startTime.UnixMilli()))
if err != nil {
ResponseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{ID: sessionID, ExpTime: expTime.UnixMilli()}
country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r)
// The difference with web is mostly here:
e.services.Producer.Produce(e.cfg.TopicRawIOS, tokenData.ID, Encode(&IOSSessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserOS: "IOS",
UserOSVersion: req.UserOSVersion,
UserDevice: ios.MapIOSDevice(req.UserDevice),
UserDeviceType: ios.GetIOSDeviceType(req.UserDevice),
UserCountry: country,
}))
}
ResponseWithJSON(w, &StartIOSSessionResponse{
Token: e.services.Tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: e.cfg.BeaconSizeLimit,
})
}
func (e *Router) pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) {
sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r)
if err != nil {
ResponseWithError(w, http.StatusUnauthorized, err)
return
}
e.pushMessages(w, r, sessionData.ID, e.cfg.TopicRawIOS)
}
func (e *Router) pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) {
sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r)
if err != nil && err != token.EXPIRED {
ResponseWithError(w, http.StatusUnauthorized, err)
return
}
// Check timestamps here?
e.pushMessages(w, r, sessionData.ID, e.cfg.TopicRawIOS)
}
func (e *Router) imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) {
log.Printf("received image request")
sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r)
if err != nil { // Should accept expired token?
ResponseWithError(w, http.StatusUnauthorized, err)
return
}
if r.Body == nil {
ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"))
return
}
r.Body = http.MaxBytesReader(w, r.Body, e.cfg.FileSizeLimit)
defer r.Body.Close()
err = r.ParseMultipartForm(1e6) // ~1Mb
if err == http.ErrNotMultipart || err == http.ErrMissingBoundary {
ResponseWithError(w, http.StatusUnsupportedMediaType, err)
return
// } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB
} else if err != nil {
ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
if r.MultipartForm == nil {
ResponseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed"))
return
}
if len(r.MultipartForm.Value["projectKey"]) == 0 {
ResponseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter?
return
}
prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/"
for _, fileHeaderList := range r.MultipartForm.File {
for _, fileHeader := range fileHeaderList {
file, err := fileHeader.Open()
if err != nil {
continue // TODO: send server error or accumulate successful files
}
key := prefix + fileHeader.Filename
log.Printf("Uploading image... %v", key)
go func() { //TODO: mime type from header
if err := e.services.Storage.Upload(file, key, "image/jpeg", false); err != nil {
log.Printf("Upload ios screen error. %v", err)
}
}()
}
}
w.WriteHeader(http.StatusOK)
}


@ -0,0 +1,222 @@
package router
import (
"encoding/json"
"errors"
"go.opentelemetry.io/otel/attribute"
"io"
"log"
"math/rand"
"net/http"
"openreplay/backend/internal/http/uuid"
"strconv"
"time"
"openreplay/backend/pkg/db/postgres"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/token"
)
func (e *Router) readBody(w http.ResponseWriter, r *http.Request, limit int64) ([]byte, error) {
body := http.MaxBytesReader(w, r.Body, limit)
bodyBytes, err := io.ReadAll(body)
if closeErr := body.Close(); closeErr != nil {
log.Printf("error while closing request body: %s", closeErr)
}
if err != nil {
return nil, err
}
reqSize := len(bodyBytes)
e.requestSize.Record(
r.Context(),
float64(reqSize),
[]attribute.KeyValue{attribute.String("method", r.URL.Path)}...,
)
return bodyBytes, nil
}
func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
// Check request body
if r.Body == nil {
ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"))
return
}
bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit)
if err != nil {
log.Printf("error while reading request body: %s", err)
ResponseWithError(w, http.StatusRequestEntityTooLarge, err)
return
}
// Parse request body
req := &StartSessionRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
ResponseWithError(w, http.StatusBadRequest, err)
return
}
// Handler's logic
if req.ProjectKey == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := e.services.Database.GetProjectByKey(*req.ProjectKey)
if err != nil {
if postgres.IsNoRowsErr(err) {
ResponseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or capture limit has been reached"))
} else {
ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := uuid.GetUUID(req.UserUUID)
tokenData, err := e.services.Tokenizer.Parse(req.Token)
if err != nil || req.Reset { // Starting the new one
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
ResponseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
ua := e.services.UaParser.ParseFromHTTPRequest(r)
if ua == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
sessionID, err := e.services.Flaker.Compose(uint64(startTime.UnixMilli()))
if err != nil {
ResponseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{ID: sessionID, ExpTime: expTime.UnixMilli()}
sessionStart := &SessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r),
UserDeviceMemorySize: req.DeviceMemory,
UserDeviceHeapSize: req.JsHeapSizeLimit,
UserID: req.UserID,
}
// Save sessionStart to db
if err := e.services.Database.InsertWebSessionStart(sessionID, sessionStart); err != nil {
log.Printf("can't insert session start: %s", err)
}
// Send sessionStart message to kafka
if err := e.services.Producer.Produce(e.cfg.TopicRawWeb, tokenData.ID, Encode(sessionStart)); err != nil {
log.Printf("can't send session start: %s", err)
}
}
ResponseWithJSON(w, &StartSessionResponse{
Token: e.services.Tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: e.cfg.BeaconSizeLimit,
})
}
func (e *Router) pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) {
// Check authorization
sessionData, err := e.services.Tokenizer.ParseFromHTTPRequest(r)
if err != nil {
ResponseWithError(w, http.StatusUnauthorized, err)
return
}
// Check request body
if r.Body == nil {
ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"))
return
}
bodyBytes, err := e.readBody(w, r, e.cfg.BeaconSizeLimit)
if err != nil {
log.Printf("error while reading request body: %s", err)
ResponseWithError(w, http.StatusRequestEntityTooLarge, err)
return
}
// Send processed messages to queue as array of bytes
// TODO: check bytes for nonsense crap
err = e.services.Producer.Produce(e.cfg.TopicRawWeb, sessionData.ID, bodyBytes)
if err != nil {
log.Printf("can't send processed messages to queue: %s", err)
}
w.WriteHeader(http.StatusOK)
}
func (e *Router) notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) {
// Check request body
if r.Body == nil {
ResponseWithError(w, http.StatusBadRequest, errors.New("request body is empty"))
return
}
bodyBytes, err := e.readBody(w, r, e.cfg.JsonSizeLimit)
if err != nil {
log.Printf("error while reading request body: %s", err)
ResponseWithError(w, http.StatusRequestEntityTooLarge, err)
return
}
// Parse request body
req := &NotStartedRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
ResponseWithError(w, http.StatusBadRequest, err)
return
}
// Handler's logic
if req.ProjectKey == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
ua := e.services.UaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway
if ua == nil {
ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r)
err = e.services.Database.InsertUnstartedSession(postgres.UnstartedSession{
ProjectKey: *req.ProjectKey,
TrackerVersion: req.TrackerVersion,
DoNotTrack: req.DoNotTrack,
Platform: "web",
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: country,
})
if err != nil {
log.Printf("Unable to insert Unstarted Session: %v\n", err)
}
w.WriteHeader(http.StatusOK)
}


@ -1,28 +1,27 @@
-package main
+package router
 import (
-gzip "github.com/klauspost/pgzip"
 "io"
 "io/ioutil"
 "log"
 "net/http"
+gzip "github.com/klauspost/pgzip"
 )
-const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb
-func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) {
-body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
+func (e *Router) pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) {
+body := http.MaxBytesReader(w, r.Body, e.cfg.BeaconSizeLimit)
 defer body.Close()
 var reader io.ReadCloser
 var err error
 switch r.Header.Get("Content-Encoding") {
 case "gzip":
 log.Println("Gzip", reader)
 reader, err = gzip.NewReader(body)
 if err != nil {
-responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent responce
+ResponseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent response
 return
 }
 log.Println("Gzip reader init", reader)
@ -33,9 +32,9 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topi
 log.Println("Reader after switch:", reader)
 buf, err := ioutil.ReadAll(reader)
 if err != nil {
-responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
+ResponseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
 return
 }
-producer.Produce(topicName, sessionID, buf) // What if not able to send?
+e.services.Producer.Produce(topicName, sessionID, buf) // What if not able to send?
 w.WriteHeader(http.StatusOK)
 }


@ -0,0 +1,49 @@
package router
type StartSessionRequest struct {
Token string `json:"token"`
UserUUID *string `json:"userUUID"`
RevID string `json:"revID"`
Timestamp uint64 `json:"timestamp"`
TrackerVersion string `json:"trackerVersion"`
IsSnippet bool `json:"isSnippet"`
DeviceMemory uint64 `json:"deviceMemory"`
JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"`
ProjectKey *string `json:"projectKey"`
Reset bool `json:"reset"`
UserID string `json:"userID"`
}
type StartSessionResponse struct {
Timestamp int64 `json:"timestamp"`
Delay int64 `json:"delay"`
Token string `json:"token"`
UserUUID string `json:"userUUID"`
SessionID string `json:"sessionID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
}
type NotStartedRequest struct {
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
DoNotTrack bool `json:"DoNotTrack"`
}
type StartIOSSessionRequest struct {
Token string `json:"token"`
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
RevID string `json:"revID"`
UserUUID *string `json:"userUUID"`
UserOSVersion string `json:"userOSVersion"`
UserDevice string `json:"userDevice"`
Timestamp uint64 `json:"timestamp"`
}
type StartIOSSessionResponse struct {
Token string `json:"token"`
ImagesHashList []string `json:"imagesHashList"`
UserUUID string `json:"userUUID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
SessionID string `json:"sessionID"`
}
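
For illustration, the kind of JSON body that startSessionHandlerWeb decodes into StartSessionRequest; the projectKey, trackerVersion and timestamp values are placeholders. A hypothetical snippet living alongside the types above (e.g. in a _test.go file):

package router // hypothetical illustration, not part of the diff

import (
    "encoding/json"
    "fmt"
)

// sampleStartSessionBody decodes the kind of payload POST /v1/web/start expects.
func sampleStartSessionBody() {
    body := []byte(`{"projectKey":"YOUR_PROJECT_KEY","trackerVersion":"1.0.0","timestamp":1656576000000,"reset":false}`)
    req := &StartSessionRequest{}
    if err := json.Unmarshal(body, req); err != nil {
        fmt.Println("bad start-session body:", err)
        return
    }
    fmt.Println(*req.ProjectKey, req.TrackerVersion, req.Timestamp)
}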


@ -1,4 +1,4 @@
-package main
+package router
 import (
 "encoding/json"
@ -6,7 +6,7 @@ import (
 "net/http"
 )
-func responseWithJSON(w http.ResponseWriter, res interface{}) {
+func ResponseWithJSON(w http.ResponseWriter, res interface{}) {
 body, err := json.Marshal(res)
 if err != nil {
 log.Println(err)
@ -15,10 +15,10 @@ func responseWithJSON(w http.ResponseWriter, res interface{}) {
 w.Write(body)
 }
-func responseWithError(w http.ResponseWriter, code int, err error) {
+func ResponseWithError(w http.ResponseWriter, code int, err error) {
 type response struct {
 Error string `json:"error"`
 }
 w.WriteHeader(code)
-responseWithJSON(w, &response{err.Error()})
+ResponseWithJSON(w, &response{err.Error()})
 }


@ -0,0 +1,119 @@
package router
import (
"context"
"fmt"
"github.com/gorilla/mux"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric/instrument/syncfloat64"
"log"
"net/http"
http3 "openreplay/backend/internal/config/http"
http2 "openreplay/backend/internal/http/services"
"openreplay/backend/pkg/monitoring"
"time"
)
type Router struct {
router *mux.Router
cfg *http3.Config
services *http2.ServicesBuilder
requestSize syncfloat64.Histogram
requestDuration syncfloat64.Histogram
totalRequests syncfloat64.Counter
}
func NewRouter(cfg *http3.Config, services *http2.ServicesBuilder, metrics *monitoring.Metrics) (*Router, error) {
switch {
case cfg == nil:
return nil, fmt.Errorf("config is empty")
case services == nil:
return nil, fmt.Errorf("services is empty")
case metrics == nil:
return nil, fmt.Errorf("metrics is empty")
}
e := &Router{
cfg: cfg,
services: services,
}
e.initMetrics(metrics)
e.init()
return e, nil
}
func (e *Router) init() {
e.router = mux.NewRouter()
// Root path
e.router.HandleFunc("/", e.root)
handlers := map[string]func(http.ResponseWriter, *http.Request){
"/v1/web/not-started": e.notStartedHandlerWeb,
"/v1/web/start": e.startSessionHandlerWeb,
"/v1/web/i": e.pushMessagesHandlerWeb,
"/v1/ios/start": e.startSessionHandlerIOS,
"/v1/ios/i": e.pushMessagesHandlerIOS,
"/v1/ios/late": e.pushLateMessagesHandlerIOS,
"/v1/ios/images": e.imagesUploadHandlerIOS,
}
prefix := "/ingest"
for path, handler := range handlers {
e.router.HandleFunc(path, handler).Methods("POST", "OPTIONS")
e.router.HandleFunc(prefix+path, handler).Methods("POST", "OPTIONS")
}
// CORS middleware
e.router.Use(e.corsMiddleware)
}
func (e *Router) initMetrics(metrics *monitoring.Metrics) {
var err error
e.requestSize, err = metrics.RegisterHistogram("requests_body_size")
if err != nil {
log.Printf("can't create requests_body_size metric: %s", err)
}
e.requestDuration, err = metrics.RegisterHistogram("requests_duration")
if err != nil {
log.Printf("can't create requests_duration metric: %s", err)
}
e.totalRequests, err = metrics.RegisterCounter("requests_total")
if err != nil {
log.Printf("can't create requests_total metric: %s", err)
}
}
func (e *Router) root(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
}
func (e *Router) corsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// Prepare headers for preflight requests
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "POST")
w.Header().Set("Access-Control-Allow-Headers", "Content-Type,Authorization")
if r.Method == http.MethodOptions {
w.Header().Set("Cache-Control", "max-age=86400")
w.WriteHeader(http.StatusOK)
return
}
log.Printf("Request: %v - %v ", r.Method, r.URL.Path)
requestStart := time.Now()
// Serve request
next.ServeHTTP(w, r)
metricsContext, cancel := context.WithTimeout(context.Background(), time.Millisecond*100)
defer cancel()
e.totalRequests.Add(metricsContext, 1)
e.requestDuration.Record(metricsContext,
float64(time.Now().Sub(requestStart).Milliseconds()),
[]attribute.KeyValue{attribute.String("method", r.URL.Path)}...,
)
})
}
func (e *Router) GetHandler() http.Handler {
return e.router
}


@ -0,0 +1,46 @@
package server
import (
"context"
"errors"
"fmt"
"golang.org/x/net/http2"
"log"
"net/http"
"time"
)
type Server struct {
server *http.Server
}
func New(handler http.Handler, host, port string, timeout time.Duration) (*Server, error) {
switch {
case port == "":
return nil, errors.New("empty server port")
case handler == nil:
return nil, errors.New("empty handler")
case timeout < 1:
return nil, fmt.Errorf("invalid timeout %d", timeout)
}
server := &http.Server{
Addr: fmt.Sprintf("%s:%s", host, port),
Handler: handler,
ReadTimeout: timeout,
WriteTimeout: timeout,
}
if err := http2.ConfigureServer(server, nil); err != nil {
log.Printf("can't configure http2 server: %s", err)
}
return &Server{
server: server,
}, nil
}
func (s *Server) Start() error {
return s.server.ListenAndServe()
}
func (s *Server) Stop() {
s.server.Shutdown(context.Background())
}
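
A minimal, self-contained usage sketch of the server wrapper above; the handler, host and port are placeholders, and the import path is assumed from the package layout used elsewhere in this diff:

package main // hypothetical usage sketch, not part of the diff

import (
    "log"
    "net/http"
    "time"

    "openreplay/backend/internal/http/server" // import path assumed
)

func main() {
    h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.WriteHeader(http.StatusOK)
    })
    srv, err := server.New(h, "", "8080", 30*time.Second)
    if err != nil {
        log.Fatalf("can't create server: %s", err)
    }
    log.Fatal(srv.Start()) // ListenAndServe blocks until the server stops
}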


@ -0,0 +1,34 @@
package services
import (
"openreplay/backend/internal/config/http"
"openreplay/backend/internal/http/geoip"
"openreplay/backend/internal/http/uaparser"
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/flakeid"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/storage"
"openreplay/backend/pkg/token"
)
type ServicesBuilder struct {
Database *cache.PGCache
Producer types.Producer
Flaker *flakeid.Flaker
UaParser *uaparser.UAParser
GeoIP *geoip.GeoIP
Tokenizer *token.Tokenizer
Storage *storage.S3
}
func New(cfg *http.Config, producer types.Producer, pgconn *cache.PGCache) *ServicesBuilder {
return &ServicesBuilder{
Database: pgconn,
Producer: producer,
Storage: storage.NewS3(cfg.AWSRegion, cfg.S3BucketIOSImages),
Tokenizer: token.NewTokenizer(cfg.TokenSecret),
UaParser: uaparser.NewUAParser(cfg.UAParserFile),
GeoIP: geoip.NewGeoIP(cfg.MaxMinDBFile),
Flaker: flakeid.NewFlaker(cfg.WorkerID),
}
}
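
To show how the config, services, router and server pieces added in this diff are meant to compose, here is a rough wiring sketch. The producer, PGCache and metrics values are supplied by the caller because their constructors are not part of this diff, and the import paths for the router/server packages are assumptions:

package setup // hypothetical helper package, only to illustrate the wiring

import (
    config "openreplay/backend/internal/config/http"
    "openreplay/backend/internal/http/router" // import path assumed
    "openreplay/backend/internal/http/server" // import path assumed
    "openreplay/backend/internal/http/services"
    "openreplay/backend/pkg/db/cache"
    "openreplay/backend/pkg/monitoring"
    "openreplay/backend/pkg/queue/types"
)

// BuildHTTPService wires the pieces added in this diff. The caller supplies the
// queue producer, the PG cache and the metrics registry, constructed elsewhere.
func BuildHTTPService(cfg *config.Config, metrics *monitoring.Metrics, producer types.Producer, pgconn *cache.PGCache) (*server.Server, error) {
    svc := services.New(cfg, producer, pgconn)
    r, err := router.NewRouter(cfg, svc, metrics)
    if err != nil {
        return nil, err
    }
    return server.New(r.GetHandler(), cfg.HTTPHost, cfg.HTTPPort, cfg.HTTPTimeout)
}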


@ -1,10 +1,10 @@
-package main
+package uuid
 import (
 "github.com/google/uuid"
 )
-func getUUID(u *string) string {
+func GetUUID(u *string) string {
 if u != nil {
 _, err := uuid.Parse(*u)
 if err == nil {
@ -12,4 +12,4 @@ func getUUID(u *string) string {
 }
 }
 return uuid.New().String()
 }


@ -0,0 +1,49 @@
package clientManager
import (
"openreplay/backend/internal/integrations/integration"
"strconv"
"openreplay/backend/pkg/db/postgres"
)
type manager struct {
clientMap integration.ClientMap
Events chan *integration.SessionErrorEvent
Errors chan error
RequestDataUpdates chan postgres.Integration // not pointer because it could change in other thread
}
func NewManager() *manager {
return &manager{
clientMap: make(integration.ClientMap),
RequestDataUpdates: make(chan postgres.Integration, 100),
Events: make(chan *integration.SessionErrorEvent, 100),
Errors: make(chan error, 100),
}
}
func (m *manager) Update(i *postgres.Integration) error {
key := strconv.Itoa(int(i.ProjectID)) + i.Provider
if i.Options == nil {
delete(m.clientMap, key)
return nil
}
c, exists := m.clientMap[key]
if !exists {
c, err := integration.NewClient(i, m.RequestDataUpdates, m.Events, m.Errors)
if err != nil {
return err
}
m.clientMap[key] = c
return nil
}
return c.Update(i)
}
func (m *manager) RequestAll() {
for _, c := range m.clientMap {
go c.Request()
}
}
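
A hedged usage sketch of the manager's lifecycle: Update registers (or, with nil Options, removes) one client per project/provider pair, RequestAll polls them, and the Events/Errors channels deliver the results. The import path and the standalone main are assumptions; in the real integrations service the updates arrive from the database listener.

package main // hypothetical usage sketch, not part of the diff

import (
    "log"

    "openreplay/backend/internal/integrations/clientManager" // import path assumed
)

func main() {
    m := clientManager.NewManager()

    // In the real service, m.Update is called with each *postgres.Integration row
    // change received from the database listener; here we only poll whatever
    // clients have been registered and drain the result channels.
    m.RequestAll()

    for {
        select {
        case e := <-m.Events:
            log.Printf("error event from %s for session %d", e.Source, e.SessionID)
        case err := <-m.Errors:
            log.Printf("integration error: %s", err)
        }
    }
}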


@ -97,7 +97,7 @@ func (b *bugsnag) Request(c *client) error {
 c.evChan <- &SessionErrorEvent{
 SessionID: sessionID,
 Token: token,
-RawErrorEvent: &messages.RawErrorEvent{
+IntegrationEvent: &messages.IntegrationEvent{
 Source: "bugsnag",
 Timestamp: timestamp,
 Name: e.Exceptions[0].Message,


@ -40,7 +40,7 @@ type client struct {
 type SessionErrorEvent struct {
 SessionID uint64
 Token string
-*messages.RawErrorEvent
+*messages.IntegrationEvent
 }
 type ClientMap map[string]*client


@ -2,43 +2,40 @@ package integration
 import (
 "github.com/aws/aws-sdk-go/aws"
-"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
 "github.com/aws/aws-sdk-go/aws/credentials"
 "github.com/aws/aws-sdk-go/aws/session"
+"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
-"strings"
-"regexp"
 "openreplay/backend/pkg/messages"
+"regexp"
+"strings"
 )
 var reIsException = regexp.MustCompile(`(?i)exception|error`)
 type cloudwatch struct {
 AwsAccessKeyId string // `json:"aws_access_key_id"`
 AwsSecretAccessKey string // `json:"aws_secret_access_key"`
 LogGroupName string // `json:"log_group_name"`
 Region string // `json:"region"`
 }
 func (cw *cloudwatch) Request(c *client) error {
 startTs := int64(c.getLastMessageTimestamp() + 1) // From next millisecond
 //endTs := utils.CurrentTimestamp()
 sess, err := session.NewSession(aws.NewConfig().
 WithRegion(cw.Region).
 WithCredentials(
 credentials.NewStaticCredentials(cw.AwsAccessKeyId, cw.AwsSecretAccessKey, ""),
 ),
 )
 if err != nil {
 return err
 }
 svc := cloudwatchlogs.New(sess)
 filterOptions := new(cloudwatchlogs.FilterLogEventsInput).
 SetStartTime(startTs). // Inclusively both startTime and endTime
 // SetEndTime(endTs). // Default nil?
 // SetLimit(10000). // Default 10000
 SetLogGroupName(cw.LogGroupName).
@ -56,7 +53,7 @@ func (cw *cloudwatch) Request(c *client) error {
 }
 if !reIsException.MatchString(*e.Message) { // too weak condition ?
 continue
 }
 token, err := GetToken(*e.Message)
 if err != nil {
 c.errChan <- err
@ -71,19 +68,19 @@ func (cw *cloudwatch) Request(c *client) error {
 c.evChan <- &SessionErrorEvent{
 //SessionID: sessionID,
 Token: token,
-RawErrorEvent: &messages.RawErrorEvent{
+IntegrationEvent: &messages.IntegrationEvent{
 Source: "cloudwatch",
 Timestamp: timestamp, // e.IngestionTime ??
 Name: name,
 Payload: strings.ReplaceAll(e.String(), "\n", ""),
 },
 }
 }
 if output.NextToken == nil {
-break;
+break
 }
 filterOptions.NextToken = output.NextToken
 }
 return nil
 }


@ -115,7 +115,7 @@ func (d *datadog) Request(c *client) error {
 c.evChan <- &SessionErrorEvent{
 //SessionID: sessionID,
 Token: token,
-RawErrorEvent: &messages.RawErrorEvent{
+IntegrationEvent: &messages.IntegrationEvent{
 Source: "datadog",
 Timestamp: timestamp,
 Name: ddLog.Content.Attributes.Error.Message,

Some files were not shown because too many files have changed in this diff.