From d30fb2ac4f1a95fa47f020a0cce44ea09186d1a3 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 4 Aug 2022 12:19:00 +0200 Subject: [PATCH 001/592] change(ui) - react upgrade, dnd lib and other runtime fixes --- .../Session_/EventsBlock/EventsBlock.js | 5 ++- frontend/app/initialize.js | 26 ++++++------ frontend/package.json | 7 ++-- frontend/path-alias.js | 42 ++++++++++--------- 4 files changed, 42 insertions(+), 38 deletions(-) diff --git a/frontend/app/components/Session_/EventsBlock/EventsBlock.js b/frontend/app/components/Session_/EventsBlock/EventsBlock.js index e690ce3cc..cc6867fb9 100644 --- a/frontend/app/components/Session_/EventsBlock/EventsBlock.js +++ b/frontend/app/components/Session_/EventsBlock/EventsBlock.js @@ -52,8 +52,9 @@ export default class EventsBlock extends React.PureComponent { const { filter } = this.state; this.setState({ query: '' }) this.props.setEventFilter({ query: '', filter }) - - this.scroller.current.forceUpdateGrid(); + if (this.scroller.current) { + this.scroller.current.forceUpdateGrid(); + } setTimeout(() => { if (!this.scroller.current) return; diff --git a/frontend/app/initialize.js b/frontend/app/initialize.js index df7909916..3ca4972ac 100644 --- a/frontend/app/initialize.js +++ b/frontend/app/initialize.js @@ -1,25 +1,25 @@ import './styles/index.scss'; import React from 'react'; +import { createRoot } from 'react-dom/client'; import './init'; import { render } from 'react-dom'; import { Provider } from 'react-redux'; import store from './store'; import Router from './Router'; -import { StoreProvider, RootStore } from './mstore'; +import { StoreProvider, RootStore } from './mstore'; import { HTML5Backend } from 'react-dnd-html5-backend'; import { DndProvider } from 'react-dnd'; document.addEventListener('DOMContentLoaded', () => { - render( - ( - - - - - - - - ), - document.getElementById('app'), - ); + const container = document.getElementById('app'); + const root = createRoot(container); + root.render( + + + + + + + + ); }); diff --git a/frontend/package.json b/frontend/package.json index 8fb7f651f..d82ada89d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,6 +29,7 @@ "jsbi": "^4.1.0", "jshint": "^2.11.1", "jspdf": "^2.5.1", + "jsx-runtime": "^1.2.0", "luxon": "^1.24.1", "mobx": "^6.3.8", "mobx-react-lite": "^3.1.6", @@ -36,14 +37,14 @@ "moment-range": "^3.0.3", "peerjs": "1.3.2", "rc-time-picker": "^3.7.3", - "react": "^16.14.0", + "react": "^18.2.0", "react-circular-progressbar": "^2.1.0", "react-confirm": "^0.1.27", "react-date-range": "^1.4.0", "react-daterange-picker": "^2.0.1", - "react-dnd": "^15.1.1", + "react-dnd": "^16.0.1", "react-dnd-html5-backend": "^15.1.2", - "react-dom": "^16.13.1", + "react-dom": "^18.2.0", "react-draggable": "^4.4.5", "react-google-recaptcha": "^1.1.0", "react-highlight": "^0.14.0", diff --git a/frontend/path-alias.js b/frontend/path-alias.js index 4dd1117e0..630e3868c 100644 --- a/frontend/path-alias.js +++ b/frontend/path-alias.js @@ -1,23 +1,25 @@ const path = require('path'); module.exports = { - "@": path.resolve(__dirname, "app"), - "App": path.resolve(__dirname, "app"), - "App/*": path.resolve(__dirname, "app/*"), - "SVG": path.resolve(__dirname, "app/svg"), - "SVG/*": path.resolve(__dirname, "app/svg/*"), - "Components": path.resolve(__dirname, "app/components"), - "Components/*": path.resolve(__dirname, "app/components/*"), - "Types": path.resolve(__dirname, "app/types" ), - "Types/*": path.resolve(__dirname, "app/types/*"), - "UI": path.resolve(__dirname, 
"app/components/ui"), - "UI/*": path.resolve(__dirname, "app/components/ui/*"), - "Duck": path.resolve(__dirname, "app/duck"), - "Duck/*": path.resolve(__dirname, "app/duck/*"), - "HOCs": path.resolve(__dirname, "app/components/hocs"), - "HOCs/*": path.resolve(__dirname, "app/components/hocs/*"), - "Shared": path.resolve(__dirname, "app/components/shared"), - "Shared/*": path.resolve(__dirname, "app/components/shared/*"), - "Player": path.resolve(__dirname, "app/player"), - "Player/*": path.resolve(__dirname, "app/player/*"), -}; + '@': path.resolve(__dirname, 'app'), + App: path.resolve(__dirname, 'app'), + 'App/*': path.resolve(__dirname, 'app/*'), + SVG: path.resolve(__dirname, 'app/svg'), + 'SVG/*': path.resolve(__dirname, 'app/svg/*'), + Components: path.resolve(__dirname, 'app/components'), + 'Components/*': path.resolve(__dirname, 'app/components/*'), + Types: path.resolve(__dirname, 'app/types'), + 'Types/*': path.resolve(__dirname, 'app/types/*'), + UI: path.resolve(__dirname, 'app/components/ui'), + 'UI/*': path.resolve(__dirname, 'app/components/ui/*'), + Duck: path.resolve(__dirname, 'app/duck'), + 'Duck/*': path.resolve(__dirname, 'app/duck/*'), + HOCs: path.resolve(__dirname, 'app/components/hocs'), + 'HOCs/*': path.resolve(__dirname, 'app/components/hocs/*'), + Shared: path.resolve(__dirname, 'app/components/shared'), + 'Shared/*': path.resolve(__dirname, 'app/components/shared/*'), + Player: path.resolve(__dirname, 'app/player'), + 'Player/*': path.resolve(__dirname, 'app/player/*'), + 'react/jsx-runtime.js': 'react/jsx-runtime', + 'react/jsx-dev-runtime.js': 'react/jsx-dev-runtime', +}; From de88092502e3e1a8911c89eebc2c0121eaa2b0de Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 4 Aug 2022 12:20:40 +0200 Subject: [PATCH 002/592] change(ui) - recharts update --- frontend/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/package.json b/frontend/package.json index d82ada89d..74d2acb03 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -58,7 +58,7 @@ "react-tippy": "^1.4.0", "react-toastify": "^9.0.3", "react-virtualized": "^9.22.3", - "recharts": "^2.1.10", + "recharts": "^2.1.13", "redux": "^4.0.5", "redux-immutable": "^4.0.0", "redux-thunk": "^2.3.0", From 7474db30fda2d137dbf4aed6d6974c1f05a623bd Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 4 Aug 2022 14:02:23 +0200 Subject: [PATCH 003/592] change(ui) - lib updates --- frontend/app/initialize.js | 4 +++- frontend/package.json | 6 +++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/app/initialize.js b/frontend/app/initialize.js index 3ca4972ac..d6dc1c2a8 100644 --- a/frontend/app/initialize.js +++ b/frontend/app/initialize.js @@ -17,7 +17,9 @@ document.addEventListener('DOMContentLoaded', () => { - + {/* */} + + {/* */} diff --git a/frontend/package.json b/frontend/package.json index 74d2acb03..22c97528f 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -33,13 +33,13 @@ "luxon": "^1.24.1", "mobx": "^6.3.8", "mobx-react-lite": "^3.1.6", - "moment": "^2.29.2", - "moment-range": "^3.0.3", + "moment": "^2.29.4", + "moment-range": "^4.0.2", "peerjs": "1.3.2", "rc-time-picker": "^3.7.3", "react": "^18.2.0", "react-circular-progressbar": "^2.1.0", - "react-confirm": "^0.1.27", + "react-confirm": "^0.2.3", "react-date-range": "^1.4.0", "react-daterange-picker": "^2.0.1", "react-dnd": "^16.0.1", From 7f048731d2709cc2602e81a289c00c4519e6e702 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 14 Sep 2022 19:01:17 +0530 
Subject: [PATCH 004/592] fix(ui) - date picker --- .../app/components/shared/SelectDateRange/SelectDateRange.tsx | 3 ++- frontend/app/initialize.js | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/shared/SelectDateRange/SelectDateRange.tsx b/frontend/app/components/shared/SelectDateRange/SelectDateRange.tsx index 7eaabc252..e7d8c8831 100644 --- a/frontend/app/components/shared/SelectDateRange/SelectDateRange.tsx +++ b/frontend/app/components/shared/SelectDateRange/SelectDateRange.tsx @@ -40,6 +40,7 @@ function SelectDateRange(props: Props) { const isCustomRange = period.rangeName === CUSTOM_RANGE; const customRange = isCustomRange ? period.rangeFormatted() : ''; + return (
+
{filter.sourceUnit}
); diff --git a/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx index 5638f9a1d..f9e9dea2e 100644 --- a/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx +++ b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx @@ -93,7 +93,8 @@ function FilterValue(props: Props) { onChange(null, { value }, valueIndex)} /> @@ -106,6 +107,7 @@ function FilterValue(props: Props) { // multiple={true} value={value} // filter={filter} + placeholder={filter.placeholder} options={filter.options} onChange={({ value }) => onChange(null, value, valueIndex)} onAddValue={onAddValue} @@ -164,7 +166,7 @@ function FilterValue(props: Props) { endpoint="/events/search" params={getParms(filter.key)} headerText={''} - // placeholder={''} + placeholder={filter.placeholder} onSelect={(e, item) => onChange(e, item, valueIndex)} icon={filter.icon} /> diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx index 13c40cbd8..28e1d23a1 100644 --- a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx +++ b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx @@ -71,6 +71,7 @@ const dropdownStyles = { interface Props { // filter: any; // event/filter // options: any[]; + placeholder?: string value: string; onChange: (value: any) => void; className?: string; @@ -84,7 +85,7 @@ interface Props { isMultilple?: boolean; } function FilterValueDropdown(props: Props) { - const { isMultilple = true, search = false, options, onChange, value, className = '', showCloseButton = true, showOrButton = true } = props; + const { placeholder = 'Select', isMultilple = true, search = false, options, onChange, value, className = '', showCloseButton = true, showOrButton = true } = props; // const options = [] return ( @@ -97,7 +98,7 @@ function FilterValueDropdown(props: Props) { name="issue_type" defaultValue={ value } onChange={ (value: any) => onChange(value.value) } - placeholder="Select" + placeholder={placeholder} styles={dropdownStyles} />
=', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, - { key: FilterKey.LARGEST_CONTENTFUL_PAINT_TIME, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Largest Contentful Paint', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/lcpt', isEvent: true, hasSource: true, sourceOperator: '>=', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, - { key: FilterKey.TTFB, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Time to First Byte', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/ttfb', isEvent: true, hasSource: true, sourceOperator: '>=', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, - { key: FilterKey.AVG_CPU_LOAD, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Avg CPU Load', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/cpu-load', isEvent: true, hasSource: true, sourceOperator: '>=', sourceUnit: '%', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, - { key: FilterKey.AVG_MEMORY_USAGE, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Avg Memory Usage', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/memory-load', isEvent: true, hasSource: true, sourceOperator: '>=', sourceUnit: 'mb', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, - { key: FilterKey.FETCH_FAILED, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Failed Request', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, icon: 'filters/fetch-failed', isEvent: true }, - { key: FilterKey.ISSUE, type: FilterType.ISSUE, category: FilterCategory.JAVASCRIPT, label: 'Issue', operator: 'is', operatorOptions: filterOptions.getOperatorsByKeys(['is', 'isAny', 'isNot']), icon: 'filters/click', options: filterOptions.issueOptions }, + { key: FilterKey.DOM_COMPLETE, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'DOM Complete', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/dom-complete', isEvent: true, hasSource: true, sourceOperator: '>=', sourcePlaceholder: 'E.g. 12', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, + { key: FilterKey.LARGEST_CONTENTFUL_PAINT_TIME, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Largest Contentful Paint', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/lcpt', isEvent: true, hasSource: true, sourceOperator: '>=', sourcePlaceholder: 'E.g. 
12', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, + { key: FilterKey.TTFB, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Time to First Byte', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/ttfb', isEvent: true, hasSource: true, sourceOperator: '>=', sourceUnit: 'ms', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators, sourcePlaceholder: 'E.g. 12', }, + { key: FilterKey.AVG_CPU_LOAD, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Avg CPU Load', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/cpu-load', isEvent: true, hasSource: true, sourceOperator: '>=', sourcePlaceholder: 'E.g. 12', sourceUnit: '%', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, + { key: FilterKey.AVG_MEMORY_USAGE, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Avg Memory Usage', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, source: [], icon: 'filters/memory-load', isEvent: true, hasSource: true, sourceOperator: '>=', sourcePlaceholder: 'E.g. 12', sourceUnit: 'mb', sourceType: FilterType.NUMBER, sourceOperatorOptions: filterOptions.customOperators }, + { key: FilterKey.FETCH_FAILED, type: FilterType.MULTIPLE, category: FilterCategory.PERFORMANCE, label: 'Failed Request', placeholder: 'Enter path', operator: 'isAny', operatorOptions: filterOptions.stringOperatorsPerformance, icon: 'filters/fetch-failed', isEvent: true }, + { key: FilterKey.ISSUE, type: FilterType.ISSUE, category: FilterCategory.JAVASCRIPT, label: 'Issue', placeholder: 'Select an issue', operator: 'is', operatorOptions: filterOptions.getOperatorsByKeys(['is', 'isAny', 'isNot']), icon: 'filters/click', options: filterOptions.issueOptions }, ]; const mapFilters = (list) => { @@ -137,6 +137,7 @@ export default Record({ timestamp: 0, key: '', label: '', + placeholder: '', icon: '', type: '', value: [""], @@ -155,6 +156,7 @@ export default Record({ source: [""], sourceType: '', sourceOperator: '=', + sourcePlaceholder: '', sourceUnit: '', sourceOperatorOptions: [], From b0b99e4910f8cfe88dd9c1dcfc5582625bbb1d96 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 22 Sep 2022 15:42:47 +0530 Subject: [PATCH 039/592] change(ui) - search changes --- frontend/app/types/filter/newFilter.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index cd65aa1f6..80fe80c3d 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -27,7 +27,7 @@ export const filters = [ { key: FilterKey.GRAPHQL_RESPONSE_BODY, type: FilterType.STRING, category: FilterCategory.PERFORMANCE, label: 'with response body', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/fetch' }, ]}, { key: FilterKey.STATEACTION, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'State Action', placeholder: 'E.g. 
12', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/state-action', isEvent: true }, - { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true }, + { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error Message', placeholder: 'E.g. ', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true }, // { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata', isEvent: true }, // FILTERS @@ -35,12 +35,12 @@ export const filters = [ { key: FilterKey.USER_BROWSER, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User Browser', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/browser' }, { key: FilterKey.USER_DEVICE, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User Device', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/device' }, { key: FilterKey.PLATFORM, type: FilterType.MULTIPLE_DROPDOWN, category: FilterCategory.GEAR, label: 'Platform', operator: 'is', operatorOptions: filterOptions.baseOperators, icon: 'filters/platform', options: platformOptions }, - { key: FilterKey.REVID, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'Version ID', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'collection' }, + { key: FilterKey.REVID, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'Version ID', placeholder: 'E.g. v1.0.8', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'collection' }, { key: FilterKey.REFERRER, type: FilterType.MULTIPLE, category: FilterCategory.RECORDING_ATTRIBUTES, label: 'Referrer', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/arrow-return-right' }, { key: FilterKey.DURATION, type: FilterType.DURATION, category: FilterCategory.RECORDING_ATTRIBUTES, label: 'Duration', operator: 'is', operatorOptions: filterOptions.getOperatorsByKeys(['is']), icon: 'filters/duration' }, { key: FilterKey.USER_COUNTRY, type: FilterType.MULTIPLE_DROPDOWN, category: FilterCategory.USER, label: 'User Country', operator: 'is', operatorOptions: filterOptions.getOperatorsByKeys(['is', 'isAny', 'isNot']), icon: 'filters/country', options: countryOptions }, // { key: FilterKey.CONSOLE, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Console', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/console' }, - { key: FilterKey.USERID, type: FilterType.MULTIPLE, category: FilterCategory.USER, label: 'User Id', operator: 'is', operatorOptions: filterOptions.stringOperators.concat([{ label: 'is undefined', value: 'isUndefined'}]), icon: 'filters/userid' }, + { key: FilterKey.USERID, type: FilterType.MULTIPLE, category: FilterCategory.USER, label: 'Identifier (User ID, Name, Email, etc)', placeholder: 'E.g. 
Alex, or alex@domain.com, or EMP123', operator: 'is', operatorOptions: filterOptions.stringOperators.concat([{ label: 'is undefined', value: 'isUndefined'}]), icon: 'filters/userid' }, { key: FilterKey.USERANONYMOUSID, type: FilterType.MULTIPLE, category: FilterCategory.USER, label: 'User AnonymousId', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/userid' }, // PERFORMANCE From 17ef16406ee952e95576a81c6a63ed53f49ba504 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 22 Sep 2022 15:44:27 +0530 Subject: [PATCH 040/592] change(ui) - search changes --- frontend/app/types/filter/newFilter.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/types/filter/newFilter.js b/frontend/app/types/filter/newFilter.js index 80fe80c3d..675298ae6 100644 --- a/frontend/app/types/filter/newFilter.js +++ b/frontend/app/types/filter/newFilter.js @@ -27,7 +27,7 @@ export const filters = [ { key: FilterKey.GRAPHQL_RESPONSE_BODY, type: FilterType.STRING, category: FilterCategory.PERFORMANCE, label: 'with response body', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/fetch' }, ]}, { key: FilterKey.STATEACTION, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'State Action', placeholder: 'E.g. 12', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/state-action', isEvent: true }, - { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error Message', placeholder: 'E.g. ', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true }, + { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error Message', placeholder: 'E.g. Uncaught SyntaxError', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true }, // { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata', isEvent: true }, // FILTERS From 959cf44cad2c521b7fbea881925559649f323330 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 27 Sep 2022 13:35:32 +0530 Subject: [PATCH 041/592] remote dev pull and resolved conflicts --- .../Alerts/Notifications/Notifications.tsx | 2 +- frontend/app/components/Header/Header.js | 105 ++++++++++-------- .../components/Header/UserMenu/UserMenu.tsx | 53 +++++++++ .../app/components/Header/UserMenu/index.ts | 1 + .../app/components/Header/header.module.css | 5 +- .../components/Header/siteDropdown.module.css | 2 +- frontend/app/components/ui/SVG.tsx | 4 +- frontend/app/svg/icons/bell-fill.svg | 3 + frontend/app/svg/icons/gear-fill.svg | 3 + 9 files changed, 123 insertions(+), 55 deletions(-) create mode 100644 frontend/app/components/Header/UserMenu/UserMenu.tsx create mode 100644 frontend/app/components/Header/UserMenu/index.ts create mode 100644 frontend/app/svg/icons/bell-fill.svg create mode 100644 frontend/app/svg/icons/gear-fill.svg diff --git a/frontend/app/components/Alerts/Notifications/Notifications.tsx b/frontend/app/components/Alerts/Notifications/Notifications.tsx index d6327d530..1ba2ce49f 100644 --- a/frontend/app/components/Alerts/Notifications/Notifications.tsx +++ b/frontend/app/components/Alerts/Notifications/Notifications.tsx @@ -34,7 +34,7 @@ function Notifications(props: Props) {
{ count }
- +
)); diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 6159197b1..429f5c80d 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -16,10 +16,11 @@ import { logout } from 'Duck/user'; import { Icon, Popup } from 'UI'; import SiteDropdown from './SiteDropdown'; import styles from './header.module.css'; -import OnboardingExplore from './OnboardingExplore/OnboardingExplore' +import OnboardingExplore from './OnboardingExplore/OnboardingExplore'; import Announcements from '../Announcements'; import Notifications from '../Alerts/Notifications'; import { init as initSite } from 'Duck/site'; +import { getInitials } from 'App/utils'; import ErrorGenPanel from 'App/dev/components'; import Alerts from '../Alerts/Alerts'; @@ -27,6 +28,7 @@ import AnimatedSVG, { ICONS } from '../shared/AnimatedSVG/AnimatedSVG'; import { fetchListActive as fetchMetadata } from 'Duck/customField'; import { useStore } from 'App/mstore'; import { useObserver } from 'mobx-react-lite'; +import UserMenu from './UserMenu'; const DASHBOARD_PATH = dashboard(); const ALERTS_PATH = alerts(); @@ -37,20 +39,24 @@ const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); const Header = (props) => { const { - sites, location, account, - onLogoutClick, siteId, - boardingCompletion = 100, showAlerts = false, + sites, + location, + account, + onLogoutClick, + siteId, + boardingCompletion = 100, + showAlerts = false, } = props; - const name = account.get('name').split(" ")[0]; - const [hideDiscover, setHideDiscover] = useState(false) + const name = account.get('name').split(' ')[0]; + const [hideDiscover, setHideDiscover] = useState(false); const { userStore, notificationStore } = useStore(); const initialDataFetched = useObserver(() => userStore.initialDataFetched); let activeSite = null; const onAccountClick = () => { props.history.push(CLIENT_PATH); - } + }; useEffect(() => { if (!account.id || initialDataFetched) return; @@ -67,36 +73,38 @@ const Header = (props) => { }, [account]); useEffect(() => { - activeSite = sites.find(s => s.id == siteId); + activeSite = sites.find((s) => s.id == siteId); props.initSite(activeSite); - }, [siteId]) + }, [siteId]); return ( -
- +
+
-
v{window.env.VERSION}
+
+ v{window.env.VERSION} +
-
+ {/*
*/} - { 'Sessions' } + {'Sessions'} - { 'Assist' } + {'Assist'} { || location.pathname.includes(ALERTS_PATH) }} > - { 'Dashboards' } + {'Dashboards'} -
- -
+
+ {/* */} + {/*
*/} - { (boardingCompletion < 100 && !hideDiscover) && ( + {boardingCompletion < 100 && !hideDiscover && ( setHideDiscover(true)} /> -
)} -
- - + + + + -
-
+
-
{ name }
- +
+ {getInitials(name)} +
-
    -
-
-
+
- { } + {} {showAlerts && }
); }; -export default withRouter(connect( - state => ({ - account: state.getIn([ 'user', 'account' ]), - siteId: state.getIn([ 'site', 'siteId' ]), - sites: state.getIn([ 'site', 'list' ]), - showAlerts: state.getIn([ 'dashboard', 'showAlerts' ]), - boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ]) - }), - { onLogoutClick: logout, initSite, fetchMetadata }, -)(Header)); +export default withRouter( + connect( + (state) => ({ + account: state.getIn(['user', 'account']), + siteId: state.getIn(['site', 'siteId']), + sites: state.getIn(['site', 'list']), + showAlerts: state.getIn(['dashboard', 'showAlerts']), + boardingCompletion: state.getIn(['dashboard', 'boardingCompletion']), + }), + { onLogoutClick: logout, initSite, fetchMetadata } + )(Header) +); diff --git a/frontend/app/components/Header/UserMenu/UserMenu.tsx b/frontend/app/components/Header/UserMenu/UserMenu.tsx new file mode 100644 index 000000000..ee369cae7 --- /dev/null +++ b/frontend/app/components/Header/UserMenu/UserMenu.tsx @@ -0,0 +1,53 @@ +import React from 'react'; +import { withRouter } from 'react-router-dom'; +import { connect } from 'react-redux'; +import { logout } from 'Duck/user'; +import { client, CLIENT_DEFAULT_TAB } from 'App/routes'; +import cn from 'classnames'; + +const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); + +interface Props { + history: any; + onLogoutClick: any; + className: string; +} +function UserMenu(props: Props) { + const onAccountClick = () => { + props.history.push(CLIENT_PATH); + }; + return ( +
+
+
+ SS +
+
+
User Name
+
Admin - admin@gmail.com
+
+
+
+ +
+
+ +
+
+ ); +} + +export default withRouter( + connect( + (state: any) => ({ + // account: state.getIn([ 'user', 'account' ]), + // siteId: state.getIn([ 'site', 'siteId' ]), + // sites: state.getIn([ 'site', 'list' ]), + // showAlerts: state.getIn([ 'dashboard', 'showAlerts' ]), + // boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ]) + }), + { onLogoutClick: logout } + )(UserMenu) +); + +// export default UserMenu; diff --git a/frontend/app/components/Header/UserMenu/index.ts b/frontend/app/components/Header/UserMenu/index.ts new file mode 100644 index 000000000..eeb4a1e9e --- /dev/null +++ b/frontend/app/components/Header/UserMenu/index.ts @@ -0,0 +1 @@ +export { default } from './UserMenu'; \ No newline at end of file diff --git a/frontend/app/components/Header/header.module.css b/frontend/app/components/Header/header.module.css index 9852b7436..f24561aa3 100644 --- a/frontend/app/components/Header/header.module.css +++ b/frontend/app/components/Header/header.module.css @@ -72,11 +72,10 @@ $height: 50px; .userDetails { display: flex; align-items: center; - justify-content: flex-end; + justify-content: center; position: relative; - padding: 0 5px 0 15px; + padding: 0 10px; transition: all 0.2s; - min-width: 100px; &:hover { background-color: $gray-lightest; diff --git a/frontend/app/components/Header/siteDropdown.module.css b/frontend/app/components/Header/siteDropdown.module.css index 886c60016..769f07f29 100644 --- a/frontend/app/components/Header/siteDropdown.module.css +++ b/frontend/app/components/Header/siteDropdown.module.css @@ -1,7 +1,7 @@ .wrapper { display: flex; align-items: center; - border-left: solid thin $gray-light !important; + /* border-left: solid thin $gray-light !important; */ padding: 10px 10px; min-width: 180px; justify-content: flex-start; diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index 23aa27e70..7ab78788c 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-plus' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 
'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 
'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 
'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' 
| 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -67,6 +67,7 @@ const SVG = (props: Props) => { case 'ban': return ; case 'bar-chart-line': return ; case 'bar-pencil': return ; + case 'bell-fill': return ; case 'bell-plus': return ; case 'bell': return ; case 'binoculars': return ; @@ -224,6 +225,7 @@ const SVG = (props: Props) => { case 'funnel-fill': return ; case 'funnel-new': return ; case 'funnel': return ; + case 'gear-fill': return ; case 'geo-alt-fill-custom': return ; case 'github': return ; case 'graph-up-arrow': return ; diff --git a/frontend/app/svg/icons/bell-fill.svg b/frontend/app/svg/icons/bell-fill.svg new file mode 100644 index 000000000..b5fa6a05b --- /dev/null +++ b/frontend/app/svg/icons/bell-fill.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/gear-fill.svg b/frontend/app/svg/icons/gear-fill.svg new file mode 100644 index 000000000..ac2a31086 --- /dev/null +++ b/frontend/app/svg/icons/gear-fill.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file From d22fa1efc16ce16cf55065e2cd8d86d3d4f19879 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 23 Sep 2022 19:31:05 +0530 Subject: [PATCH 042/592] fix(ui) - error parse --- frontend/app/api_middleware.js | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/frontend/app/api_middleware.js b/frontend/app/api_middleware.js index 783ebe8c3..1846a9dbc 100644 --- a/frontend/app/api_middleware.js +++ b/frontend/app/api_middleware.js @@ -2,27 +2,27 @@ import logger from 'App/logger'; import APIClient from './api_client'; import { UPDATE, DELETE } from './duck/jwt'; -export default store => next => (action) => { +export default (store) => (next) => (action) => { const { types, call, ...rest } = action; if (!call) { return next(action); } - const [ REQUEST, SUCCESS, FAILURE ] = types; + const [REQUEST, SUCCESS, FAILURE] = types; next({ ...rest, type: REQUEST }); const client = new APIClient(); return call(client) - .then(async response => { + .then(async (response) => { if (response.status === 403) { next({ type: DELETE }); } if (!response.ok) { - const text = await response.text() + const text = await response.text(); return Promise.reject(text); } - return response.json() + return response.json(); }) - .then(json => json || 
{}) // TEMP TODO on server: no empty responces + .then((json) => json || {}) // TEMP TODO on server: no empty responces .then(({ jwt, errors, data }) => { if (errors) { next({ type: FAILURE, errors, data }); @@ -34,14 +34,22 @@ export default store => next => (action) => { } }) .catch((e) => { - logger.error("Error during API request. ", e) - return next({ type: FAILURE, errors: JSON.parse(e).errors || [] }); + logger.error('Error during API request. ', e); + return next({ type: FAILURE, errors: parseError(e) }); }); }; +function parseError(e) { + try { + return JSON.parse(e).errors || []; + } catch { + return e; + } +} + function jwtExpired(token) { try { - const base64Url = token.split('.')[ 1 ]; + const base64Url = token.split('.')[1]; const base64 = base64Url.replace('-', '+').replace('_', '/'); const tokenObj = JSON.parse(window.atob(base64)); return tokenObj.exp * 1000 < Date.now(); // exp in Unix time (sec) From 9b6be7e580442962eb9ff0d33630deef3903a2c5 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 27 Sep 2022 13:37:39 +0530 Subject: [PATCH 043/592] remote dev pull and resolved conflicts --- .../DefaultMenuView/DefaultMenuView.tsx | 71 +++++++++++++++++++ .../Header/DefaultMenuView/index.ts | 1 + frontend/app/components/Header/Header.js | 71 ++++++------------- .../NewProjectButton/NewProjectButton.tsx | 18 +++-- .../PreferencesView/PreferencesView.tsx | 28 ++++++++ .../Header/PreferencesView/index.ts | 1 + .../Header/SettingsMenu/SettingsMenu.tsx | 66 +++++++++++++++++ .../components/Header/SettingsMenu/index.ts | 1 + .../app/components/Header/SiteDropdown.js | 22 ++---- .../components/Header/UserMenu/UserMenu.tsx | 50 +++++++------ .../app/components/Header/header.module.css | 16 ++--- .../components/Header/siteDropdown.module.css | 18 +++-- frontend/app/components/ui/SVG.tsx | 8 ++- frontend/app/svg/icons/arrow-bar-left.svg | 3 + frontend/app/svg/icons/bell-slash.svg | 3 + frontend/app/svg/icons/door-closed.svg | 4 ++ frontend/app/svg/icons/folder-plus.svg | 4 ++ frontend/app/svg/icons/folder2.svg | 3 + frontend/app/svg/icons/puzzle.svg | 3 + 19 files changed, 282 insertions(+), 109 deletions(-) create mode 100644 frontend/app/components/Header/DefaultMenuView/DefaultMenuView.tsx create mode 100644 frontend/app/components/Header/DefaultMenuView/index.ts create mode 100644 frontend/app/components/Header/PreferencesView/PreferencesView.tsx create mode 100644 frontend/app/components/Header/PreferencesView/index.ts create mode 100644 frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx create mode 100644 frontend/app/components/Header/SettingsMenu/index.ts create mode 100644 frontend/app/svg/icons/arrow-bar-left.svg create mode 100644 frontend/app/svg/icons/bell-slash.svg create mode 100644 frontend/app/svg/icons/door-closed.svg create mode 100644 frontend/app/svg/icons/folder-plus.svg create mode 100644 frontend/app/svg/icons/folder2.svg create mode 100644 frontend/app/svg/icons/puzzle.svg diff --git a/frontend/app/components/Header/DefaultMenuView/DefaultMenuView.tsx b/frontend/app/components/Header/DefaultMenuView/DefaultMenuView.tsx new file mode 100644 index 000000000..a8b321c8c --- /dev/null +++ b/frontend/app/components/Header/DefaultMenuView/DefaultMenuView.tsx @@ -0,0 +1,71 @@ +import React from 'react'; +import { NavLink, withRouter } from 'react-router-dom'; +import { + sessions, + metrics, + assist, + client, + dashboard, + withSiteId, + CLIENT_DEFAULT_TAB, +} from 'App/routes'; +import SiteDropdown from '../SiteDropdown'; +import AnimatedSVG, { ICONS } 
from 'Shared/AnimatedSVG/AnimatedSVG'; +import styles from '../header.module.css'; + +const DASHBOARD_PATH = dashboard(); +const METRICS_PATH = metrics(); +const SESSIONS_PATH = sessions(); +const ASSIST_PATH = assist(); + +interface Props { + siteId: any; +} +function DefaultMenuView(props: Props) { + const { siteId } = props; + return ( +
+ +
+
+ +
+
+ v{window.env.VERSION} +
+
+
+ + {/*
*/} + + + {'Sessions'} + + + {'Assist'} + + { + return ( + location.pathname.includes(DASHBOARD_PATH) || location.pathname.includes(METRICS_PATH) + ); + }} + > + {'Dashboards'} + +
+ ); +} + +export default DefaultMenuView; diff --git a/frontend/app/components/Header/DefaultMenuView/index.ts b/frontend/app/components/Header/DefaultMenuView/index.ts new file mode 100644 index 000000000..8b21e4cfb --- /dev/null +++ b/frontend/app/components/Header/DefaultMenuView/index.ts @@ -0,0 +1 @@ +export { default } from './DefaultMenuView' \ No newline at end of file diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 429f5c80d..bd89b176d 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -29,6 +29,9 @@ import { fetchListActive as fetchMetadata } from 'Duck/customField'; import { useStore } from 'App/mstore'; import { useObserver } from 'mobx-react-lite'; import UserMenu from './UserMenu'; +import SettingsMenu from './SettingsMenu'; +import DefaultMenuView from './DefaultMenuView'; +import PreferencesView from './PreferencesView'; const DASHBOARD_PATH = dashboard(); const ALERTS_PATH = alerts(); @@ -48,11 +51,12 @@ const Header = (props) => { showAlerts = false, } = props; - const name = account.get('name').split(' ')[0]; + const name = account.get('name'); const [hideDiscover, setHideDiscover] = useState(false); const { userStore, notificationStore } = useStore(); const initialDataFetched = useObserver(() => userStore.initialDataFetched); let activeSite = null; + const isPreferences = window.location.pathname.includes('/client/'); const onAccountClick = () => { props.history.push(CLIENT_PATH); @@ -78,50 +82,13 @@ const Header = (props) => { }, [siteId]); return ( -
- -
-
- -
-
- v{window.env.VERSION} -
-
-
- - {/*
*/} - - - {'Sessions'} - - - {'Assist'} - - { - return location.pathname.includes(DASHBOARD_PATH) - || location.pathname.includes(METRICS_PATH) - || location.pathname.includes(ALERTS_PATH) - }} - > - {'Dashboards'} - +
+ {!isPreferences && } + {isPreferences && }
- {/* */} - {/*
*/} - {boardingCompletion < 100 && !hideDiscover && ( setHideDiscover(true)} /> @@ -129,10 +96,14 @@ const Header = (props) => { )} - - - - + +
+ + + + + +
@@ -144,8 +115,10 @@ const Header = (props) => {
+ + {}
- {} + {showAlerts && }
); diff --git a/frontend/app/components/Header/NewProjectButton/NewProjectButton.tsx b/frontend/app/components/Header/NewProjectButton/NewProjectButton.tsx index 9c438c50f..a0dd30244 100644 --- a/frontend/app/components/Header/NewProjectButton/NewProjectButton.tsx +++ b/frontend/app/components/Header/NewProjectButton/NewProjectButton.tsx @@ -24,13 +24,17 @@ function NewProjectButton(props: Props) { }; return ( -
- - Add New Project -
+
  • + + Add Project +
  • + //
    + // + // Add New Project + //
    ); } diff --git a/frontend/app/components/Header/PreferencesView/PreferencesView.tsx b/frontend/app/components/Header/PreferencesView/PreferencesView.tsx new file mode 100644 index 000000000..0ba533350 --- /dev/null +++ b/frontend/app/components/Header/PreferencesView/PreferencesView.tsx @@ -0,0 +1,28 @@ +import React from 'react'; +import { Icon } from 'UI'; +import { withRouter } from 'react-router-dom'; +import ProjectCodeSnippet from 'App/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet'; + +interface Props { + history: any; +} +function PreferencesView(props: Props) { + const onExit = () => { + props.history.push('/'); + }; + return ( + <> +
    + + Exit Preferences +
    + +
    + + Changes applied at organization level +
    + + ); +} + +export default withRouter(PreferencesView); diff --git a/frontend/app/components/Header/PreferencesView/index.ts b/frontend/app/components/Header/PreferencesView/index.ts new file mode 100644 index 000000000..774801dcc --- /dev/null +++ b/frontend/app/components/Header/PreferencesView/index.ts @@ -0,0 +1 @@ +export { default } from './PreferencesView'; \ No newline at end of file diff --git a/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx new file mode 100644 index 000000000..58bc09b8c --- /dev/null +++ b/frontend/app/components/Header/SettingsMenu/SettingsMenu.tsx @@ -0,0 +1,66 @@ +import React from 'react'; +import cn from 'classnames'; +import { Icon } from 'UI'; +import { CLIENT_TABS, client as clientRoute } from 'App/routes'; +import { withRouter, RouteComponentProps } from 'react-router'; + +interface Props { + history: any; + className: string; + account: any; +} +function SettingsMenu(props: RouteComponentProps) { + const { history, account, className }: any = props; + const isAdmin = account.admin || account.superAdmin; + const navigateTo = (path: any) => { + switch (path) { + case 'projects': + return history.push(clientRoute(CLIENT_TABS.SITES)); + case 'team': + return history.push(clientRoute(CLIENT_TABS.MANAGE_USERS)); + case 'metadata': + return history.push(clientRoute(CLIENT_TABS.CUSTOM_FIELDS)); + case 'webhooks': + return history.push(clientRoute(CLIENT_TABS.WEBHOOKS)); + case 'integrations': + return history.push(clientRoute(CLIENT_TABS.INTEGRATIONS)); + case 'notifications': + return history.push(clientRoute(CLIENT_TABS.NOTIFICATIONS)); + } + }; + return ( +
    + {isAdmin && ( + <> + navigateTo('projects')} label="Projects" icon="folder2" /> + navigateTo('team')} label="Team" icon="users" /> + + )} + navigateTo('metadata')} label="Metadata" icon="tags" /> + navigateTo('webhooks')} label="Webhooks" icon="link-45deg" /> + navigateTo('integrations')} label="Integrations" icon="puzzle" /> + navigateTo('notifications')} + label="Notifications" + icon="bell-slash" + /> +
    + ); +} + +export default withRouter(SettingsMenu); + +function MenuItem({ onClick, label, icon }: any) { + return ( +
    + + +
    + ); +} diff --git a/frontend/app/components/Header/SettingsMenu/index.ts b/frontend/app/components/Header/SettingsMenu/index.ts new file mode 100644 index 000000000..0133de70a --- /dev/null +++ b/frontend/app/components/Header/SettingsMenu/index.ts @@ -0,0 +1 @@ +export { default } from './SettingsMenu'; \ No newline at end of file diff --git a/frontend/app/components/Header/SiteDropdown.js b/frontend/app/components/Header/SiteDropdown.js index 7a0205be3..90d024910 100644 --- a/frontend/app/components/Header/SiteDropdown.js +++ b/frontend/app/components/Header/SiteDropdown.js @@ -3,7 +3,6 @@ import { connect } from 'react-redux'; import { setSiteId } from 'Duck/site'; import { withRouter } from 'react-router-dom'; import { hasSiteId, siteChangeAvaliable } from 'App/routes'; -import { STATUS_COLOR_MAP, GREEN } from 'Types/site'; import { Icon } from 'UI'; import { pushNewSite } from 'Duck/user'; import { init } from 'Duck/site'; @@ -13,7 +12,6 @@ import { clearSearch } from 'Duck/search'; import { clearSearch as clearSearchLive } from 'Duck/liveSearch'; import { fetchListActive as fetchIntegrationVariables } from 'Duck/customField'; import { withStore } from 'App/mstore'; -import AnimatedSVG, { ICONS } from '../shared/AnimatedSVG/AnimatedSVG'; import NewProjectButton from './NewProjectButton'; @withStore @@ -63,37 +61,27 @@ export default class SiteDropdown extends React.PureComponent { account, location: { pathname }, } = this.props; - const { showProductModal } = this.state; const isAdmin = account.admin || account.superAdmin; const activeSite = sites.find((s) => s.id == siteId); const disabled = !siteChangeAvaliable(pathname); const showCurrent = hasSiteId(pathname) || siteChangeAvaliable(pathname); - // const canAddSites = isAdmin && account.limits.projects && account.limits.projects.remaining !== 0; return (
    - {showCurrent ? ( - activeSite && activeSite.status === GREEN ? ( - - ) : ( - - ) - ) : ( - - )}
    {showCurrent && activeSite ? activeSite.host : 'All Projects'}
      - {!showCurrent &&
{'Project selection is not applicable.'}
} + {isAdmin && ( + + )} {sites.map((site) => (
this.switchSite(site.id)}> -
      - {site.host} + + {site.host}
))}
    -
    ); diff --git a/frontend/app/components/Header/UserMenu/UserMenu.tsx b/frontend/app/components/Header/UserMenu/UserMenu.tsx index ee369cae7..c88f7ff1a 100644 --- a/frontend/app/components/Header/UserMenu/UserMenu.tsx +++ b/frontend/app/components/Header/UserMenu/UserMenu.tsx @@ -1,9 +1,11 @@ import React from 'react'; -import { withRouter } from 'react-router-dom'; +import { withRouter, RouteComponentProps } from 'react-router-dom'; import { connect } from 'react-redux'; import { logout } from 'Duck/user'; import { client, CLIENT_DEFAULT_TAB } from 'App/routes'; +import { Icon } from 'UI'; import cn from 'classnames'; +import { getInitials } from 'App/utils'; const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); @@ -11,43 +13,45 @@ interface Props { history: any; onLogoutClick: any; className: string; + account: any; } -function UserMenu(props: Props) { +function UserMenu(props: RouteComponentProps) { + const { account, history, className, onLogoutClick }: any = props; + const onAccountClick = () => { - props.history.push(CLIENT_PATH); + history.push(CLIENT_PATH); }; return ( -
    +
    - SS + {getInitials(account.name)}
    -
    User Name
    -
    Admin - admin@gmail.com
    +
    {account.name}
    +
    {account.superAdmin ? 'Super Admin' : (account.admin ? 'Admin' : 'Member') } - {account.email}
    -
    - +
    + +
    -
    - +
    + +
    ); } -export default withRouter( - connect( - (state: any) => ({ - // account: state.getIn([ 'user', 'account' ]), - // siteId: state.getIn([ 'site', 'siteId' ]), - // sites: state.getIn([ 'site', 'list' ]), - // showAlerts: state.getIn([ 'dashboard', 'showAlerts' ]), - // boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ]) - }), - { onLogoutClick: logout } - )(UserMenu) -); +export default connect( + (state: any) => ({ + account: state.getIn(['user', 'account']), + }), + { onLogoutClick: logout } +)(withRouter(UserMenu)) as React.FunctionComponent>; // export default UserMenu; diff --git a/frontend/app/components/Header/header.module.css b/frontend/app/components/Header/header.module.css index f24561aa3..5b2bbd46f 100644 --- a/frontend/app/components/Header/header.module.css +++ b/frontend/app/components/Header/header.module.css @@ -4,13 +4,13 @@ $height: 50px; .header { - position: fixed; - width: 100%; - display: flex; - justify-content: space-between; + /* position: fixed; */ + /* width: 100%; */ + /* display: flex; */ + /* justify-content: space-between; */ border-bottom: solid thin $gray-light; /* padding: 0 15px; */ - background: $white; + /* background: $white; */ z-index: $header; } @@ -45,7 +45,7 @@ $height: 50px; } .right { - margin-left: auto; + /* margin-left: auto; */ position: relative; cursor: default; display: flex; @@ -101,7 +101,7 @@ $height: 50px; border-top: 1px solid $gray-light; } } - & a, & button { + /* & a, & button { color: $gray-darkest; display: block; cursor: pointer; @@ -112,7 +112,7 @@ $height: 50px; &:hover { background-color: $gray-lightest; } - } + } */ } .userIcon { diff --git a/frontend/app/components/Header/siteDropdown.module.css b/frontend/app/components/Header/siteDropdown.module.css index 769f07f29..508eb81c8 100644 --- a/frontend/app/components/Header/siteDropdown.module.css +++ b/frontend/app/components/Header/siteDropdown.module.css @@ -7,9 +7,14 @@ justify-content: flex-start; position: relative; user-select: none; - + border: solid thin transparent; + height: 30px; + border-radius: 3px; + margin: 10px; + &:hover { - background-color: $gray-lightest; + background-color: $active-blue; + /* border: solid thin $active-blue-border; */ & .drodownIcon { transform: rotate(180deg); transition: all 0.2s; @@ -39,11 +44,12 @@ & .menu { display: none; position: absolute; - top: 50px; + top: 28px; left: -1px; background-color: white; min-width: 200px; z-index: 2; + border-radius: 3px; border: 1px solid $gray-light; } @@ -68,9 +74,13 @@ &:hover { background-color: $gray-lightest; transition: all 0.2s; + color: $teal; + svg { + fill: $teal; + } } &:first-child { - border-top: 1px solid $gray-light; + /* border-top: 1px solid $gray-light; */ } } } diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index 7ab78788c..b12dd305b 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 
'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 
'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 
'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 
'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -21,6 +21,7 @@ const SVG = (props: Props) => { case 'analytics': return ; case 'anchor': return ; case 'arrow-alt-square-right': return ; + case 'arrow-bar-left': return ; case 'arrow-clockwise': return ; case 'arrow-down': return ; case 'arrow-right-short': return ; @@ -69,6 +70,7 @@ const SVG = (props: Props) => { case 'bar-pencil': return ; case 'bell-fill': return ; case 'bell-plus': return ; + case 'bell-slash': return ; case 'bell': return ; case 'binoculars': return ; case 'book': return ; @@ -134,6 +136,7 @@ const SVG = (props: Props) => { case 'device': return ; case 'diagram-3': return ; case 'dizzy': return ; + case 'door-closed': return ; case 'doublecheck': return ; case 'download': return ; case 'drag': return ; @@ -199,6 +202,8 @@ const SVG = (props: Props) => { case 'filters/userid': return ; case 'filters/view': return ; case 'flag-na': return ; + case 'folder-plus': return ; + case 'folder2': return ; case 'fullscreen': return ; case 'funnel/cpu-fill': return ; case 'funnel/cpu': return ; @@ -329,6 +334,7 @@ const SVG = (props: Props) => { case 'plus': return ; case 'prev1': return ; case 'puzzle-piece': return ; + case 'puzzle': return ; case 
'question-circle': return ; case 'question-lg': return ; case 'quote-left': return ; diff --git a/frontend/app/svg/icons/arrow-bar-left.svg b/frontend/app/svg/icons/arrow-bar-left.svg new file mode 100644 index 000000000..3558e407e --- /dev/null +++ b/frontend/app/svg/icons/arrow-bar-left.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/bell-slash.svg b/frontend/app/svg/icons/bell-slash.svg new file mode 100644 index 000000000..7b2a23785 --- /dev/null +++ b/frontend/app/svg/icons/bell-slash.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/door-closed.svg b/frontend/app/svg/icons/door-closed.svg new file mode 100644 index 000000000..5b9db0f13 --- /dev/null +++ b/frontend/app/svg/icons/door-closed.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/folder-plus.svg b/frontend/app/svg/icons/folder-plus.svg new file mode 100644 index 000000000..9e6d99b36 --- /dev/null +++ b/frontend/app/svg/icons/folder-plus.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/folder2.svg b/frontend/app/svg/icons/folder2.svg new file mode 100644 index 000000000..273f49c63 --- /dev/null +++ b/frontend/app/svg/icons/folder2.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/puzzle.svg b/frontend/app/svg/icons/puzzle.svg new file mode 100644 index 000000000..928b53546 --- /dev/null +++ b/frontend/app/svg/icons/puzzle.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file From 8baf050a006edd01b6f6af371b3a07c3276f133c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 27 Sep 2022 12:38:56 +0200 Subject: [PATCH 044/592] feat(chalice): refactored code feat(chalice): code cleanup --- api/chalicelib/core/authorizers.py | 2 +- api/chalicelib/core/signup.py | 2 +- api/chalicelib/utils/helper.py | 81 +----------------------------- 3 files changed, 3 insertions(+), 82 deletions(-) diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index c32f08208..a474fcb8d 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -15,7 +15,7 @@ def jwt_authorizer(token): token[1], config("jwt_secret"), algorithms=config("jwt_algorithm"), - audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"] + audience=[ f"front:{helper.get_stage_name()}"] ) except jwt.ExpiredSignatureError: print("! 
JWT Expired signature") diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index 23c2c8744..9106084ad 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -21,7 +21,7 @@ def create_step1(data: schemas.UserSignupSchema): password = data.password print("Verifying email validity") - if email is None or len(email) < 5 or not helper.is_valid_email(email): + if email is None or len(email) < 5: errors.append("Invalid email address.") else: print("Verifying email existance") diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 192c309f5..919ac9d9f 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -18,40 +18,13 @@ def get_version_number(): def get_stage_name(): - stage = config("STAGE") - return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage - - -def is_production(): - return get_stage_name() == "production" - - -def is_staging(): - return get_stage_name() == "staging" - - -def is_onprem(): - return not is_production() and not is_staging() - - -def is_local(): - return config("STAGE").startswith(local_prefix) + return "OpenReplay" def generate_salt(): return "".join(random.choices(string.hexdigits, k=36)) -def unique_ordered_list(array): - uniq = [] - [uniq.append(x) for x in array if x not in uniq] - return uniq - - -def unique_unordered_list(array): - return list(set(array)) - - def list_to_camel_case(items, flatten=False): for i in range(len(items)): if flatten: @@ -130,12 +103,6 @@ def key_to_snake_case(name, delimiter='_', split_number=False): TRACK_TIME = True -def __sbool_to_bool(value): - if value is None or not isinstance(value, str): - return False - return value.lower() in ["true", "yes", "1"] - - def allow_captcha(): return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \ and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0 @@ -210,54 +177,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator return value -def is_valid_email(email): - return re.match(r"[^@]+@[^@]+\.[^@]+", email) is not None - - -def is_valid_http_url(url): - regex = re.compile( - r'^(?:http|ftp)s?://' # http:// or https:// - r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... - r'localhost|' # localhost... - r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip - r'(?::\d+)?' # optional port - r'(?:/?|[/?]\S+)$', re.IGNORECASE) - - return re.match(regex, url) is not None - - -def is_valid_url(url): - regex = re.compile( - # r'^(?:http|ftp)s?://' # http:// or https:// - r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... - r'localhost|' # localhost... - r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip - r'(?::\d+)?' 
# optional port - r'(?:/?|[/?]\S+)$', re.IGNORECASE) - - return re.match(regex, url) is not None - - -def is_alphabet_space(word): - r = re.compile("^[a-zA-Z ]*$") - return r.match(word) is not None - - -def is_alphabet_latin_space(word): - r = re.compile("^[a-zA-Z\u00C0-\u00D6\u00D8-\u00f6\u00f8-\u00ff\s ]*$") - return r.match(word) is not None - - def is_alphabet_space_dash(word): r = re.compile("^[a-zA-Z -]*$") return r.match(word) is not None -def is_alphanumeric_space(word): - r = re.compile("^[a-zA-Z0-9._\- ]*$") - return r.match(word) is not None - - def merge_lists_by_key(l1, l2, key): merged = {} for item in l1 + l2: @@ -310,9 +234,6 @@ def explode_widget(data, key=None): return result -TEMP_PATH = "./" if is_local() else "/tmp/" - - def get_issue_title(issue_type): return {'click_rage': "Click Rage", 'dead_click': "Dead Click", From 8931e118a0b59d360491c32926a4110e228e3489 Mon Sep 17 00:00:00 2001 From: sylenien Date: Tue, 27 Sep 2022 16:12:09 +0200 Subject: [PATCH 045/592] change(tracker): 4.1.2 --- tracker/tracker-vuex/src/index.ts | 3 ++- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/index.ts | 6 +++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/tracker/tracker-vuex/src/index.ts b/tracker/tracker-vuex/src/index.ts index 380be9081..27767ed10 100644 --- a/tracker/tracker-vuex/src/index.ts +++ b/tracker/tracker-vuex/src/index.ts @@ -25,7 +25,8 @@ function processMutationAndState( const _table = encoder.commit(); for (let key in _table) app.send(Messages.OTable(key, _table[key])); app.send(Messages.Vuex(_mutation, _state)); - } catch { + } catch (e) { + console.error(e) encoder.clear(); } } diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 0943c4104..071a5c409 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.1", + "version": "4.1.2", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 769cc8672..338184b59 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -304,8 +304,12 @@ export default class App { this.debug.error('OpenReplay error: Unable to build session URL') return undefined } + const ingest = this.options.ingestPoint + const isSaas = ingest === DEFAULT_INGEST_POINT - return this.options.ingestPoint.replace(/ingest$/, `${projectID}/session/${sessionID}`) + const projectPath = isSaas ? 
ingest.replace('api', 'app') : ingest + + return projectPath.replace(/ingest$/, `${projectID}/session/${sessionID}`) } getHost(): string { From 45f3d7b793dabc012e8ab2ae5ea014ad03d956e1 Mon Sep 17 00:00:00 2001 From: sylenien Date: Tue, 27 Sep 2022 16:45:11 +0200 Subject: [PATCH 046/592] change(tracker): tracker-vuex 4.0.3 --- tracker/tracker-vuex/package.json | 2 +- tracker/tracker-vuex/src/index.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tracker/tracker-vuex/package.json b/tracker/tracker-vuex/package.json index 97c9831ce..b4fa2ff50 100644 --- a/tracker/tracker-vuex/package.json +++ b/tracker/tracker-vuex/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-vuex", "description": "Tracker plugin for Vuex state recording", - "version": "4.0.2", + "version": "4.0.3", "keywords": [ "vuex", "logging", diff --git a/tracker/tracker-vuex/src/index.ts b/tracker/tracker-vuex/src/index.ts index 27767ed10..a4ca0a676 100644 --- a/tracker/tracker-vuex/src/index.ts +++ b/tracker/tracker-vuex/src/index.ts @@ -50,6 +50,7 @@ export default function(opts: Partial = {}) { return (storeName: string) => (store) => { // Vuex if (store.subscribe) { + app.debug.log('Hooked to vuex store') const randomId = Math.random().toString(36).substring(2, 9) store.subscribe((mutation, storeState) => { state[storeName || randomId] = storeState @@ -59,6 +60,7 @@ export default function(opts: Partial = {}) { // Pinia if (store.$onAction) { + app.debug.log('Hooked to pinia store') store.$onAction(({ name, store, args }) => { try { state[storeName || store.$id] = store.$state; From 976bf7713e326c16c25d48df18236f485dfccd61 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 27 Sep 2022 17:57:23 +0200 Subject: [PATCH 047/592] feat(chalice): session notes --- api/auth/auth_project.py | 4 +- api/chalicelib/core/sessions_notes.py | 105 ++++++++++++++++++++++++++ api/routers/core_dynamic.py | 56 +++++++++++++- api/schemas.py | 28 +++++++ ee/api/.gitignore | 1 + ee/api/clean.sh | 1 + ee/api/routers/core_dynamic.py | 61 ++++++++++++++- 7 files changed, 252 insertions(+), 4 deletions(-) create mode 100644 api/chalicelib/core/sessions_notes.py diff --git a/api/auth/auth_project.py b/api/auth/auth_project.py index 6f842916b..0f28b4162 100644 --- a/api/auth/auth_project.py +++ b/api/auth/auth_project.py @@ -17,8 +17,8 @@ class ProjectAuthorizer: current_user: schemas.CurrentContext = await OR_context(request) value = request.path_params[self.project_identifier] if (self.project_identifier == "projectId" \ - and not (isinstance(value, int) or isinstance(value, str) and value.isnumeric()) - and projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None) \ + and (not (isinstance(value, int) or isinstance(value, str) and value.isnumeric()) + or projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None)) \ or (self.project_identifier == "projectKey" \ and projects.get_internal_project_id(project_key=value) is None): print("project not found") diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py new file mode 100644 index 000000000..916af221a --- /dev/null +++ b/api/chalicelib/core/sessions_notes.py @@ -0,0 +1,105 @@ +import json + +import schemas +from chalicelib.core import users +from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils.TimeUTC import TimeUTC + + +def get_session_notes(tenant_id, project_id, session_id, user_id): + with pg_client.PostgresClient() as cur: + query = 
cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND sessions_notes.session_id = %(session_id)s + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, + "tenant_id": tenant_id, "session_id": session_id}) + + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def get_all_notes(tenant_id, project_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + RETURNING *;""", + {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) + cur.execute(query) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): + sub_query = [] + if data.message is not None: + sub_query.append("message = %(message)s") + if data.tags is not None: + sub_query.append("tags = %(tags)s") + if data.is_public is not None: + sub_query.append("is_public = %(is_public)s") + if data.timestamp is not None: + sub_query.append("timestamp = %(timestamp)s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.sessions_notes + SET + {" ,".join(sub_query)} + WHERE + project_id = %(project_id)s + AND user_id = %(user_id)s + AND note_id = %(note_id)s + AND deleted_at ISNULL + RETURNING *;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()}) + ) + row = helper.dict_to_camel_case(cur.fetchone()) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + +def delete(tenant_id, user_id, project_id, note_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.sessions_notes + SET + deleted_at = timezone('utc'::text, now()) + WHERE + note_id = %(note_id)s + AND project_id = %(project_id)s\ + AND user_id = %(user_id)s + AND deleted_at ISNULL;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id}) + ) + return {"data": {"state": "success"}} diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index a9b50b4dc..d2357b319 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, 
FileResponse import schemas from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ - sessions_favorite, assist + sessions_favorite, assist, sessions_notes from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -372,3 +372,57 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem return { 'data': data } + + +@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"]) +def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"]) +def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId) + return data + + +@app.get('/{projectId}/notes', tags=["sessions", "notes"]) +def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } diff --git a/api/schemas.py b/api/schemas.py index f6dc8b34b..9be29e84a 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1084,3 +1084,31 @@ class IntegrationType(str, Enum): stackdriver = "STACKDRIVER" cloudwatch = "CLOUDWATCH" newrelic = "NEWRELIC" + + +class SessionNoteSchema(BaseModel): + message: str = Field(..., min_length=2) + tags: List[str] = Field(default=[]) + timestamp: int = Field(default=-1) + is_public: bool = Field(default=False) + + class Config: + alias_generator = attribute_to_camel_case + + +class SessionUpdateNoteSchema(SessionNoteSchema): + message: Optional[str] = Field(default=None, min_length=2) + tags: Optional[List[str]] = Field(default=None) + timestamp: Optional[int] = Field(default=None, ge=-1) + is_public: Optional[bool] = Field(default=None) + + @root_validator + def validator(cls, values): + assert len(values.keys()) > 0, "at least 1 attribute should be provided for update" + c = 0 + for v in values.values(): + if v is not None 
and (not isinstance(v, str) or len(v) > 0): + c += 1 + break + assert c > 0, "at least 1 value should be provided for update" + return values diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 811b00301..924060617 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -213,6 +213,7 @@ Pipfile /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py +/chalicelib/core/sessions_notes.py #exp /chalicelib/core/significance.py /chalicelib/core/slack.py /chalicelib/core/socket_ios.py diff --git a/ee/api/clean.sh b/ee/api/clean.sh index ce58fe45e..53607cb25 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -35,6 +35,7 @@ rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/sessions_assignments.py rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py +rm -rf ./chalicelib/core/sessions_notes.py #exp rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/slack.py rm -rf ./chalicelib/core/socket_ios.py diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index ed31fd56c..ee3c3a83f 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas import schemas_ee from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ - errors_favorite + errors_favorite, sessions_notes from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -396,3 +396,62 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem return { 'data': data } + + +@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], + dependencies=[OR_scope(Permissions.session_replay)]) +@app.put('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], + dependencies=[OR_scope(Permissions.session_replay)]) +def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], + dependencies=[OR_scope(Permissions.session_replay)]) +def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], + dependencies=[OR_scope(Permissions.session_replay)]) +@app.put('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) +def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId, data=data) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], + 
dependencies=[OR_scope(Permissions.session_replay)]) +def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id, + note_id=noteId) + return data + + +@app.get('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) +def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } From 6fada561493b5d5d4710c691de1e3a7a6847f60c Mon Sep 17 00:00:00 2001 From: sylenien Date: Wed, 28 Sep 2022 11:08:41 +0200 Subject: [PATCH 048/592] change(ui): change mobs url --- .../player/MessageDistributor/MessageDistributor.ts | 2 +- frontend/app/types/session/session.ts | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index c5ac4f6b5..a79bb6261 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -217,7 +217,7 @@ export default class MessageDistributor extends StatedScreen { this.processStateUpdates(msgs) } - loadFiles(this.session.mobsUrl, + loadFiles(this.session.domURL, onData ) .then(() => this.onFileSuccessRead()) diff --git a/frontend/app/types/session/session.ts b/frontend/app/types/session/session.ts index 5eadadf4b..48bc7af2c 100644 --- a/frontend/app/types/session/session.ts +++ b/frontend/app/types/session/session.ts @@ -42,7 +42,7 @@ export default Record({ favorite: false, filterId: '', messagesUrl: '', - mobsUrl: [], + domURL: [], userBrowser: '', userBrowserVersion: '?', userCountry: '', @@ -82,7 +82,7 @@ export default Record({ agentIds: [], isCallActive: false }, { - fromJS:({ + fromJS:({ startTs=0, timestamp = 0, backendErrors=0, @@ -92,7 +92,7 @@ export default Record({ stackEvents = [], issues = [], sessionId, sessionID, - mobsUrl = [], + domURL = [], ...session }) => { const duration = Duration.fromMillis(session.duration < 1000 ? 1000 : session.duration); @@ -117,7 +117,7 @@ export default Record({ const missedResources = resources.filter(({ success }) => !success); const logs = List(session.logs).map(Log); - const stackEventsList = List(stackEvents) + const stackEventsList = List(stackEvents) .concat(List(session.userEvents)) .sortBy(se => se.timestamp) .map(se => StackEvent({ ...se, time: se.timestamp - startedAt })); @@ -149,7 +149,7 @@ export default Record({ issues: issuesList, sessionId: sessionId || sessionID, userId: session.userId || session.userID, - mobsUrl: Array.isArray(mobsUrl) ? mobsUrl : [ mobsUrl ] + domURL: Array.isArray(domURL) ? 
domURL : [ domURL ] }; }, idKey: "sessionId", From ab450b7945025e9a52df51486b0970692d0f405b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 11:36:10 +0200 Subject: [PATCH 049/592] feat(chalice): session mobsUrl --- api/chalicelib/core/sessions.py | 1 + api/chalicelib/core/sessions_mobs.py | 20 ++++++++++++++++++++ ee/api/chalicelib/core/sessions.py | 1 + 3 files changed, 22 insertions(+) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 4a27d0b13..5b43ddbd1 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -95,6 +95,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 1107ee6d4..53385f6a6 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -26,6 +26,26 @@ def get_urls(project_id, session_id): return results +def get_urls_depercated(sessionId): + return [ + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + }, + ExpiresIn=100000 + ), + client.generate_presigned_url( + 'get_object', + Params={ + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) + "e" + }, + ExpiresIn=100000 + )] + + def get_ios(session_id): return client.generate_presigned_url( 'get_object', diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 92c6e8f74..0c908c500 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -96,6 +96,7 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, context=context) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, From 968d25e29e4cecc3bbe9a3d85c71e7e0f1fe3e18 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 12:23:40 +0200 Subject: [PATCH 050/592] feat(chalice): session mobsUrl --- api/chalicelib/core/sessions_mobs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 53385f6a6..3d966a47c 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -26,13 +26,13 @@ def get_urls(project_id, session_id): return results -def get_urls_depercated(sessionId): +def get_urls_depercated(session_id): return [ client.generate_presigned_url( 'get_object', Params={ 'Bucket': config("sessions_bucket"), - 'Key': str(sessionId) + 'Key': str(session_id) }, ExpiresIn=100000 ), @@ -40,7 +40,7 @@ def 
get_urls_depercated(sessionId): 'get_object', Params={ 'Bucket': config("sessions_bucket"), - 'Key': str(sessionId) + "e" + 'Key': str(session_id) + "e" }, ExpiresIn=100000 )] From 541a62e5de23b5970df3394b837875ea8d7be0ed Mon Sep 17 00:00:00 2001 From: sylenien Date: Wed, 28 Sep 2022 12:30:00 +0200 Subject: [PATCH 051/592] change(ui): change mobs url --- frontend/app/player/MessageDistributor/MessageDistributor.ts | 2 +- frontend/app/types/session/session.ts | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index a79bb6261..f0033c9a6 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -217,7 +217,7 @@ export default class MessageDistributor extends StatedScreen { this.processStateUpdates(msgs) } - loadFiles(this.session.domURL, + loadFiles(this.session.mobsURL, onData ) .then(() => this.onFileSuccessRead()) diff --git a/frontend/app/types/session/session.ts b/frontend/app/types/session/session.ts index 48bc7af2c..ad2479bf1 100644 --- a/frontend/app/types/session/session.ts +++ b/frontend/app/types/session/session.ts @@ -43,6 +43,7 @@ export default Record({ filterId: '', messagesUrl: '', domURL: [], + mobsURL: [], userBrowser: '', userBrowserVersion: '?', userCountry: '', @@ -93,6 +94,7 @@ export default Record({ issues = [], sessionId, sessionID, domURL = [], + mobsURL = [], ...session }) => { const duration = Duration.fromMillis(session.duration < 1000 ? 1000 : session.duration); @@ -149,7 +151,8 @@ export default Record({ issues: issuesList, sessionId: sessionId || sessionID, userId: session.userId || session.userID, - domURL: Array.isArray(domURL) ? domURL : [ domURL ] + domURL: Array.isArray(domURL) ? domURL : [ domURL ], + mobsURL: Array.isArray(mobsURL) ? mobsURL : [ mobsURL ], }; }, idKey: "sessionId", From 8a1de30e5b53a188022bcd0936669cb1558634f6 Mon Sep 17 00:00:00 2001 From: sylenien Date: Wed, 28 Sep 2022 12:34:58 +0200 Subject: [PATCH 052/592] change(ui): fix mobs url case --- .../app/player/MessageDistributor/MessageDistributor.ts | 2 +- frontend/app/types/session/session.ts | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index f0033c9a6..c5ac4f6b5 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -217,7 +217,7 @@ export default class MessageDistributor extends StatedScreen { this.processStateUpdates(msgs) } - loadFiles(this.session.mobsURL, + loadFiles(this.session.mobsUrl, onData ) .then(() => this.onFileSuccessRead()) diff --git a/frontend/app/types/session/session.ts b/frontend/app/types/session/session.ts index ad2479bf1..d66670963 100644 --- a/frontend/app/types/session/session.ts +++ b/frontend/app/types/session/session.ts @@ -43,7 +43,7 @@ export default Record({ filterId: '', messagesUrl: '', domURL: [], - mobsURL: [], + mobsUrl: [], userBrowser: '', userBrowserVersion: '?', userCountry: '', @@ -94,7 +94,7 @@ export default Record({ issues = [], sessionId, sessionID, domURL = [], - mobsURL = [], + mobsUrl = [], ...session }) => { const duration = Duration.fromMillis(session.duration < 1000 ? 
1000 : session.duration); @@ -152,7 +152,7 @@ export default Record({ sessionId: sessionId || sessionID, userId: session.userId || session.userID, domURL: Array.isArray(domURL) ? domURL : [ domURL ], - mobsURL: Array.isArray(mobsURL) ? mobsURL : [ mobsURL ], + mobsUrl: Array.isArray(mobsUrl) ? mobsUrl : [ mobsUrl ], }; }, idKey: "sessionId", From 21632f8c466f3bc3917efbdd7df7134f249d275f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 13:40:22 +0200 Subject: [PATCH 053/592] feat(DB): sessions_notes structure --- .../helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 15 +++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 17 ++++++++++++++++- .../helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql | 15 +++++++++++++++ .../helm/db/init_dbs/postgresql/init_schema.sql | 14 ++++++++++++++ 4 files changed, 60 insertions(+), 1 deletion(-) diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 4eb88bd9e..3586ff63a 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -7,4 +7,19 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS public.tenants ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT); + +CREATE TABLE IF NOT EXISTS sessions_notes +( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE +); + COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 2be29136b..792003dab 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -129,7 +129,8 @@ $$ ('user_viewed_errors'), ('user_viewed_sessions'), ('users'), - ('webhooks')) + ('webhooks'), + ('sessions_notes')) select bool_and(exists(select * from information_schema.tables t where table_schema = 'public' @@ -857,6 +858,20 @@ $$ FOR EACH ROW EXECUTE PROCEDURE notify_alert(); + CREATE TABLE IF NOT EXISTS sessions_notes + ( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE + ); + RAISE NOTICE 'Created missing public schema tables'; END IF; END; diff --git a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql index 57deb548d..4ce5009a3 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql +++ 
b/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql @@ -7,4 +7,19 @@ $$ LANGUAGE sql IMMUTABLE; ALTER TABLE IF EXISTS public.tenants ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT); + +CREATE TABLE IF NOT EXISTS sessions_notes +( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE +); + COMMIT; \ No newline at end of file diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index ad435348e..9e3a0f924 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1000,6 +1000,20 @@ $$ FOR EACH ROW EXECUTE PROCEDURE notify_alert(); + CREATE TABLE sessions_notes + ( + note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + message text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + deleted_at timestamp without time zone NULL DEFAULT NULL, + tags text[] NOT NULL DEFAULT '{}', + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + timestamp integer NOT NULL DEFAULT -1, + is_public boolean NOT NULL DEFAULT FALSE + ); + raise notice 'DB created'; END IF; END; From 56ed06ed17986c1a2f59be65ef05ade28a8f82ab Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 28 Sep 2022 13:41:50 +0200 Subject: [PATCH 054/592] Message processing refactoring (#743) * feat(backend): refactored message processing logic and cleaned up previous changes --- backend/cmd/assets/main.go | 60 +- backend/cmd/db/main.go | 106 +- backend/cmd/ender/main.go | 31 +- backend/cmd/heuristics/main.go | 26 +- backend/cmd/integrations/main.go | 3 +- backend/cmd/sink/main.go | 105 +- backend/cmd/storage/main.go | 27 +- backend/internal/db/datasaver/messages.go | 3 +- backend/internal/db/datasaver/stats.go | 2 +- backend/internal/http/router/handlers-ios.go | 5 +- backend/internal/http/router/handlers-web.go | 2 +- backend/internal/sessionender/ender.go | 7 +- backend/internal/sink/assetscache/assets.go | 17 +- backend/pkg/log/queue.go | 23 +- backend/pkg/messages/batch.go | 197 -- backend/pkg/messages/extra.go | 56 - backend/pkg/messages/facade.go | 5 - backend/pkg/messages/filters.go | 2 +- backend/pkg/messages/iterator.go | 184 ++ backend/pkg/messages/message.go | 79 +- backend/pkg/messages/messages.go | 2301 +++++++------- backend/pkg/messages/raw.go | 7 + backend/pkg/messages/read-message.go | 2966 +++++++++--------- backend/pkg/queue/import.go | 5 +- backend/pkg/queue/messages.go | 12 - backend/pkg/queue/types/types.go | 21 +- backend/pkg/redisstream/consumer.go | 42 +- backend/pkg/sessions/builder.go | 1 + backend/pkg/sessions/builderMap.go | 5 +- ee/backend/internal/db/datasaver/messages.go | 3 +- ee/backend/internal/db/datasaver/stats.go | 2 +- 
ee/backend/pkg/failover/failover.go | 17 +- ee/backend/pkg/kafka/consumer.go | 39 +- ee/backend/pkg/queue/import.go | 5 +- mobs/messages.rb | 5 + mobs/run.rb | 2 +- 36 files changed, 3221 insertions(+), 3152 deletions(-) delete mode 100644 backend/pkg/messages/batch.go delete mode 100644 backend/pkg/messages/extra.go delete mode 100644 backend/pkg/messages/facade.go create mode 100644 backend/pkg/messages/iterator.go delete mode 100644 backend/pkg/queue/messages.go diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index b81ff9b5a..6f428034c 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -3,7 +3,6 @@ package main import ( "context" "log" - "openreplay/backend/pkg/queue/types" "os" "os/signal" "syscall" @@ -31,40 +30,30 @@ func main() { log.Printf("can't create assets_total metric: %s", err) } - consumer := queue.NewMessageConsumer( + msgHandler := func(msg messages.Message) { + switch m := msg.(type) { + case *messages.AssetCache: + cacher.CacheURL(m.SessionID(), m.URL) + totalAssets.Add(context.Background(), 1) + case *messages.ErrorEvent: + if m.Source != "js_exception" { + return + } + sourceList, err := assets.ExtractJSExceptionSources(&m.Payload) + if err != nil { + log.Printf("Error on source extraction: %v", err) + return + } + for _, source := range sourceList { + cacher.CacheJSFile(source) + } + } + } + + msgConsumer := queue.NewConsumer( cfg.GroupCache, []string{cfg.TopicCache}, - func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - for iter.Next() { - if iter.Type() == messages.MsgAssetCache { - m := iter.Message().Decode() - if m == nil { - return - } - msg := m.(*messages.AssetCache) - cacher.CacheURL(sessionID, msg.URL) - totalAssets.Add(context.Background(), 1) - } else if iter.Type() == messages.MsgErrorEvent { - m := iter.Message().Decode() - if m == nil { - return - } - msg := m.(*messages.ErrorEvent) - if msg.Source != "js_exception" { - continue - } - sourceList, err := assets.ExtractJSExceptionSources(&msg.Payload) - if err != nil { - log.Printf("Error on source extraction: %v", err) - continue - } - for _, source := range sourceList { - cacher.CacheJSFile(source) - } - } - } - iter.Close() - }, + messages.NewMessageIterator(msgHandler, []int{messages.MsgAssetCache, messages.MsgErrorEvent}, true), true, cfg.MessageSizeLimit, ) @@ -79,15 +68,14 @@ func main() { select { case sig := <-sigchan: log.Printf("Caught signal %v: terminating\n", sig) - consumer.Close() + msgConsumer.Close() os.Exit(0) case err := <-cacher.Errors: log.Printf("Error while caching: %v", err) - // TODO: notify user case <-tick: cacher.UpdateTimeouts() default: - if err := consumer.ConsumeNext(); err != nil { + if err := msgConsumer.ConsumeNext(); err != nil { log.Fatalf("Error on consumption: %v", err) } } diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index a807cc253..89fb7ce33 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -45,10 +45,6 @@ func main() { // Create handler's aggregator builderMap := sessions.NewBuilderMap(handlersFabric) - keepMessage := func(tp int) bool { - return tp == messages.MsgMetadata || tp == messages.MsgIssueEvent || tp == messages.MsgSessionStart || tp == messages.MsgSessionEnd || tp == messages.MsgUserID || tp == messages.MsgUserAnonymousID || tp == messages.MsgCustomEvent || tp == messages.MsgClickEvent || tp == messages.MsgInputEvent || tp == messages.MsgPageEvent || tp == messages.MsgErrorEvent || tp == messages.MsgFetchEvent || tp == messages.MsgGraphQLEvent || tp == 
messages.MsgIntegrationEvent || tp == messages.MsgPerformanceTrackAggr || tp == messages.MsgResourceEvent || tp == messages.MsgLongTask || tp == messages.MsgJSException || tp == messages.MsgResourceTiming || tp == messages.MsgRawCustomEvent || tp == messages.MsgCustomIssue || tp == messages.MsgFetch || tp == messages.MsgGraphQL || tp == messages.MsgStateAction || tp == messages.MsgSetInputTarget || tp == messages.MsgSetInputValue || tp == messages.MsgCreateDocument || tp == messages.MsgMouseClick || tp == messages.MsgSetPageLocation || tp == messages.MsgPageLoadTiming || tp == messages.MsgPageRenderTiming - } - var producer types.Producer = nil if cfg.UseQuickwit { producer = queue.NewProducer(cfg.MessageSizeLimit, true) @@ -60,69 +56,67 @@ func main() { saver.InitStats() statsLogger := logger.NewQueueStats(cfg.LoggerTimeout) + msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd, + messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgCustomEvent, messages.MsgClickEvent, + messages.MsgInputEvent, messages.MsgPageEvent, messages.MsgErrorEvent, messages.MsgFetchEvent, + messages.MsgGraphQLEvent, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr, + messages.MsgResourceEvent, messages.MsgLongTask, messages.MsgJSException, messages.MsgResourceTiming, + messages.MsgRawCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgGraphQL, + messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, + messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming} + // Handler logic - handler := func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - statsLogger.Collect(sessionID, meta) + msgHandler := func(msg messages.Message) { + statsLogger.Collect(msg) // TODO: carefully check message meta and batch meta confusion situation - for iter.Next() { - if !keepMessage(iter.Type()) { - continue + // Just save session data into db without additional checks + if err := saver.InsertMessage(msg); err != nil { + if !postgres.IsPkeyViolation(err) { + log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) } - msg := iter.Message().Decode() - if msg == nil { - return - } - - // Just save session data into db without additional checks - if err := saver.InsertMessage(sessionID, msg); err != nil { - if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, sessionID, msg) - } - return - } - - session, err := pg.GetSession(sessionID) - if session == nil { - if err != nil && !errors.Is(err, cache.NilSessionInCacheError) { - log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg) - } - return - } - - // Save statistics to db - err = saver.InsertStats(session, msg) - if err != nil { - log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg) - } - - // Handle heuristics and save to temporary queue in memory - builderMap.HandleMessage(sessionID, msg, msg.Meta().Index) - - // Process saved heuristics messages as usual messages above in the code - builderMap.IterateSessionReadyMessages(sessionID, func(msg messages.Message) { - if err := saver.InsertMessage(sessionID, msg); err != nil { - if !postgres.IsPkeyViolation(err) { - log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg) - } - return - } - - if err := 
saver.InsertStats(session, msg); err != nil { - log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg) - } - }) + return } - iter.Close() + + session, err := pg.GetSession(msg.SessionID()) + if session == nil { + if err != nil && !errors.Is(err, cache.NilSessionInCacheError) { + log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) + } + return + } + + // Save statistics to db + err = saver.InsertStats(session, msg) + if err != nil { + log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg) + } + + // Handle heuristics and save to temporary queue in memory + builderMap.HandleMessage(msg) + + // Process saved heuristics messages as usual messages above in the code + builderMap.IterateSessionReadyMessages(msg.SessionID(), func(msg messages.Message) { + if err := saver.InsertMessage(msg); err != nil { + if !postgres.IsPkeyViolation(err) { + log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg) + } + return + } + + if err := saver.InsertStats(session, msg); err != nil { + log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg) + } + }) } // Init consumer - consumer := queue.NewMessageConsumer( + consumer := queue.NewConsumer( cfg.GroupDB, []string{ cfg.TopicRawWeb, cfg.TopicAnalytics, }, - handler, + messages.NewMessageIterator(msgHandler, msgFilter, true), false, cfg.MessageSizeLimit, ) @@ -146,7 +140,7 @@ func main() { pgDur := time.Now().Sub(start).Milliseconds() start = time.Now() - if err := saver.CommitStats(consumer.HasFirstPartition()); err != nil { + if err := saver.CommitStats(); err != nil { log.Printf("Error on stats commit: %v", err) } chDur := time.Now().Sub(start).Milliseconds() diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index a2dafa689..b698bb13b 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -2,7 +2,6 @@ package main import ( "log" - "openreplay/backend/pkg/queue/types" "os" "os/signal" "syscall" @@ -38,24 +37,24 @@ func main() { return } producer := queue.NewProducer(cfg.MessageSizeLimit, true) - consumer := queue.NewMessageConsumer( + + msgHandler := func(msg messages.Message) { + if msg.TypeID() == messages.MsgSessionStart || msg.TypeID() == messages.MsgSessionEnd { + return + } + if msg.Meta().Timestamp == 0 { + log.Printf("ZERO TS, sessID: %d, msgType: %d", msg.Meta().SessionID(), msg.TypeID()) + } + statsLogger.Collect(msg) + sessions.UpdateSession(msg) //TODO: recheck timestamps(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp) + } + + consumer := queue.NewConsumer( cfg.GroupEnder, []string{ cfg.TopicRawWeb, }, - func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - for iter.Next() { - if iter.Type() == messages.MsgSessionStart || iter.Type() == messages.MsgSessionEnd { - continue - } - if iter.Message().Meta().Timestamp == 0 { - log.Printf("ZERO TS, sessID: %d, msgType: %d", sessionID, iter.Type()) - } - statsLogger.Collect(sessionID, meta) - sessions.UpdateSession(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp) - } - iter.Close() - }, + messages.NewMessageIterator(msgHandler, nil, false), false, cfg.MessageSizeLimit, ) @@ -94,7 +93,7 @@ func main() { currDuration, newDuration) return true } - if err := producer.Produce(cfg.TopicRawWeb, sessionID, messages.Encode(msg)); err != nil { + if err := producer.Produce(cfg.TopicRawWeb, sessionID, msg.Encode()); err != nil { log.Printf("can't send sessionEnd to topic: %s; 
sessID: %d", err, sessionID) return false } diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go index be27a86bd..82510fdb7 100644 --- a/backend/cmd/heuristics/main.go +++ b/backend/cmd/heuristics/main.go @@ -2,7 +2,6 @@ package main import ( "log" - "openreplay/backend/pkg/queue/types" "os" "os/signal" "syscall" @@ -47,25 +46,18 @@ func main() { // Init producer and consumer for data bus producer := queue.NewProducer(cfg.MessageSizeLimit, true) - consumer := queue.NewMessageConsumer( + + msgHandler := func(msg messages.Message) { + statsLogger.Collect(msg) + builderMap.HandleMessage(msg) //(sessionID, msg, iter.Message().Meta().Index) + } + + consumer := queue.NewConsumer( cfg.GroupHeuristics, []string{ cfg.TopicRawWeb, }, - func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - var lastMessageID uint64 - for iter.Next() { - statsLogger.Collect(sessionID, meta) - msg := iter.Message().Decode() - if msg == nil { - log.Printf("failed batch, sess: %d, lastIndex: %d", sessionID, lastMessageID) - continue - } - lastMessageID = msg.Meta().Index - builderMap.HandleMessage(sessionID, msg, iter.Message().Meta().Index) - } - iter.Close() - }, + messages.NewMessageIterator(msgHandler, nil, true), false, cfg.MessageSizeLimit, ) @@ -86,7 +78,7 @@ func main() { os.Exit(0) case <-tick: builderMap.IterateReadyMessages(func(sessionID uint64, readyMsg messages.Message) { - producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(readyMsg)) + producer.Produce(cfg.TopicAnalytics, sessionID, readyMsg.Encode()) }) producer.Flush(cfg.ProducerTimeout) consumer.Commit() diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 86490c6ab..9bfa4d7a2 100644 --- a/backend/cmd/integrations/main.go +++ b/backend/cmd/integrations/main.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" - "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/token" ) @@ -84,7 +83,7 @@ func main() { } sessionID = sessData.ID } - producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(event.IntegrationEvent)) + producer.Produce(cfg.TopicAnalytics, sessionID, event.IntegrationEvent.Encode()) case err := <-manager.Errors: log.Printf("Integration error: %v\n", err) case i := <-manager.RequestDataUpdates: diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index bd7fddf20..0b82c3130 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -3,7 +3,6 @@ package main import ( "context" "log" - "openreplay/backend/pkg/queue/types" "os" "os/signal" "syscall" @@ -13,7 +12,7 @@ import ( "openreplay/backend/internal/sink/assetscache" "openreplay/backend/internal/sink/oswriter" "openreplay/backend/internal/storage" - . 
"openreplay/backend/pkg/messages" + "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" @@ -51,64 +50,58 @@ func main() { log.Printf("can't create messages_size metric: %s", err) } - consumer := queue.NewMessageConsumer( + msgHandler := func(msg messages.Message) { + // [METRICS] Increase the number of processed messages + totalMessages.Add(context.Background(), 1) + + // Send SessionEnd trigger to storage service + if msg.TypeID() == messages.MsgSessionEnd { + if err := producer.Produce(cfg.TopicTrigger, msg.SessionID(), msg.Encode()); err != nil { + log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, msg.SessionID()) + } + return + } + + // Process assets + if msg.TypeID() == messages.MsgSetNodeAttributeURLBased || + msg.TypeID() == messages.MsgSetCSSDataURLBased || + msg.TypeID() == messages.MsgCSSInsertRuleURLBased || + msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased || + msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased { + msg = assetMessageHandler.ParseAssets(msg.Decode()) // TODO: filter type only once (use iterator inside or bring ParseAssets out here). + } + + // Filter message + if !messages.IsReplayerType(msg.TypeID()) { + return + } + + // If message timestamp is empty, use at least ts of session start + ts := msg.Meta().Timestamp + if ts == 0 { + log.Printf("zero ts; sessID: %d, msgType: %d", msg.SessionID(), msg.TypeID()) + } else { + // Log ts of last processed message + counter.Update(msg.SessionID(), time.UnixMilli(ts)) + } + + // Write encoded message with index to session file + data := msg.EncodeWithIndex() + if err := writer.Write(msg.SessionID(), data); err != nil { + log.Printf("Writer error: %v\n", err) + } + + // [METRICS] Increase the number of written to the files messages and the message size + messageSize.Record(context.Background(), float64(len(data))) + savedMessages.Add(context.Background(), 1) + } + + consumer := queue.NewConsumer( cfg.GroupSink, []string{ cfg.TopicRawWeb, }, - func(sessionID uint64, iter Iterator, meta *types.Meta) { - for iter.Next() { - // [METRICS] Increase the number of processed messages - totalMessages.Add(context.Background(), 1) - - // Send SessionEnd trigger to storage service - if iter.Type() == MsgSessionEnd { - if err := producer.Produce(cfg.TopicTrigger, sessionID, iter.Message().Encode()); err != nil { - log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, sessionID) - } - continue - } - - msg := iter.Message() - // Process assets - if iter.Type() == MsgSetNodeAttributeURLBased || - iter.Type() == MsgSetCSSDataURLBased || - iter.Type() == MsgCSSInsertRuleURLBased || - iter.Type() == MsgAdoptedSSReplaceURLBased || - iter.Type() == MsgAdoptedSSInsertRuleURLBased { - m := msg.Decode() - if m == nil { - return - } - msg = assetMessageHandler.ParseAssets(sessionID, m) // TODO: filter type only once (use iterator inide or bring ParseAssets out here). 
- } - - // Filter message - if !IsReplayerType(msg.TypeID()) { - continue - } - - // If message timestamp is empty, use at least ts of session start - ts := msg.Meta().Timestamp - if ts == 0 { - log.Printf("zero ts; sessID: %d, msgType: %d", sessionID, iter.Type()) - } else { - // Log ts of last processed message - counter.Update(sessionID, time.UnixMilli(ts)) - } - - // Write encoded message with index to session file - data := msg.EncodeWithIndex() - if err := writer.Write(sessionID, data); err != nil { - log.Printf("Writer error: %v\n", err) - } - - // [METRICS] Increase the number of written to the files messages and the message size - messageSize.Record(context.Background(), float64(len(data))) - savedMessages.Add(context.Background(), 1) - } - iter.Close() - }, + messages.NewMessageIterator(msgHandler, nil, false), false, cfg.MessageSizeLimit, ) diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index b3848c5de..ad03c1129 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -2,7 +2,6 @@ package main import ( "log" - "openreplay/backend/pkg/queue/types" "os" "os/signal" "strconv" @@ -38,24 +37,24 @@ func main() { log.Fatalf("can't init sessionFinder module: %s", err) } - consumer := queue.NewMessageConsumer( + consumer := queue.NewConsumer( cfg.GroupStorage, []string{ cfg.TopicTrigger, }, - func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - for iter.Next() { - if iter.Type() == messages.MsgSessionEnd { - msg := iter.Message().Decode().(*messages.SessionEnd) - if err := srv.UploadKey(strconv.FormatUint(sessionID, 10), 5); err != nil { - log.Printf("can't find session: %d", sessionID) - sessionFinder.Find(sessionID, msg.Timestamp) - } - // Log timestamp of last processed session - counter.Update(sessionID, time.UnixMilli(meta.Timestamp)) + messages.NewMessageIterator( + func(msg messages.Message) { + m := msg.(*messages.SessionEnd) + if err := srv.UploadKey(strconv.FormatUint(msg.SessionID(), 10), 5); err != nil { + log.Printf("can't find session: %d", msg.SessionID()) + sessionFinder.Find(msg.SessionID(), m.Timestamp) } - } - }, + // Log timestamp of last processed session + counter.Update(msg.SessionID(), time.UnixMilli(msg.Meta().Batch().Timestamp())) + }, + []int{messages.MsgSessionEnd}, + true, + ), true, cfg.MessageSizeLimit, ) diff --git a/backend/internal/db/datasaver/messages.go b/backend/internal/db/datasaver/messages.go index 702c2f210..bf1f82e6b 100644 --- a/backend/internal/db/datasaver/messages.go +++ b/backend/internal/db/datasaver/messages.go @@ -5,7 +5,8 @@ import ( . 
"openreplay/backend/pkg/messages" ) -func (mi *Saver) InsertMessage(sessionID uint64, msg Message) error { +func (mi *Saver) InsertMessage(msg Message) error { + sessionID := msg.SessionID() switch m := msg.(type) { // Common case *Metadata: diff --git a/backend/internal/db/datasaver/stats.go b/backend/internal/db/datasaver/stats.go index 17028ca5c..26efe51b5 100644 --- a/backend/internal/db/datasaver/stats.go +++ b/backend/internal/db/datasaver/stats.go @@ -22,6 +22,6 @@ func (si *Saver) InsertStats(session *Session, msg Message) error { return nil } -func (si *Saver) CommitStats(optimize bool) error { +func (si *Saver) CommitStats() error { return nil } diff --git a/backend/internal/http/router/handlers-ios.go b/backend/internal/http/router/handlers-ios.go index 43fd0a7a9..645c16552 100644 --- a/backend/internal/http/router/handlers-ios.go +++ b/backend/internal/http/router/handlers-ios.go @@ -74,7 +74,7 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r) // The difference with web is mostly here: - e.services.Producer.Produce(e.cfg.TopicRawIOS, tokenData.ID, Encode(&IOSSessionStart{ + sessStart := &IOSSessionStart{ Timestamp: req.Timestamp, ProjectID: uint64(p.ProjectID), TrackerVersion: req.TrackerVersion, @@ -85,7 +85,8 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) UserDevice: ios.MapIOSDevice(req.UserDevice), UserDeviceType: ios.GetIOSDeviceType(req.UserDevice), UserCountry: country, - })) + } + e.services.Producer.Produce(e.cfg.TopicRawIOS, tokenData.ID, sessStart.Encode()) } ResponseWithJSON(w, &StartIOSSessionResponse{ diff --git a/backend/internal/http/router/handlers-web.go b/backend/internal/http/router/handlers-web.go index 04728de4e..6cc095d40 100644 --- a/backend/internal/http/router/handlers-web.go +++ b/backend/internal/http/router/handlers-web.go @@ -125,7 +125,7 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) } // Send sessionStart message to kafka - if err := e.services.Producer.Produce(e.cfg.TopicRawWeb, tokenData.ID, Encode(sessionStart)); err != nil { + if err := e.services.Producer.Produce(e.cfg.TopicRawWeb, tokenData.ID, sessionStart.Encode()); err != nil { log.Printf("can't send session start: %s", err) } } diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index 2f8e1e1ba..07ccf19af 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -5,6 +5,7 @@ import ( "fmt" "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" + "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "time" ) @@ -52,9 +53,11 @@ func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder, } // UpdateSession save timestamp for new sessions and update for existing sessions -func (se *SessionEnder) UpdateSession(sessionID uint64, timestamp, msgTimestamp int64) { +func (se *SessionEnder) UpdateSession(msg messages.Message) { + sessionID := msg.Meta().SessionID() + currTS := msg.Meta().Batch().Timestamp() + msgTimestamp := msg.Meta().Timestamp localTS := time.Now().UnixMilli() - currTS := timestamp if currTS == 0 { log.Printf("got empty timestamp for sessionID: %d", sessionID) return diff --git a/backend/internal/sink/assetscache/assets.go b/backend/internal/sink/assetscache/assets.go index 8218af4e6..478141453 100644 --- a/backend/internal/sink/assetscache/assets.go +++ 
b/backend/internal/sink/assetscache/assets.go @@ -22,14 +22,14 @@ func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer) * } } -func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.Message { +func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { switch m := msg.(type) { case *messages.SetNodeAttributeURLBased: if m.Name == "src" || m.Name == "href" { newMsg := &messages.SetNodeAttribute{ ID: m.ID, Name: m.Name, - Value: e.handleURL(sessID, m.BaseURL, m.Value), + Value: e.handleURL(m.SessionID(), m.BaseURL, m.Value), } newMsg.SetMeta(msg.Meta()) return newMsg @@ -37,7 +37,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages. newMsg := &messages.SetNodeAttribute{ ID: m.ID, Name: m.Name, - Value: e.handleCSS(sessID, m.BaseURL, m.Value), + Value: e.handleCSS(m.SessionID(), m.BaseURL, m.Value), } newMsg.SetMeta(msg.Meta()) return newMsg @@ -45,7 +45,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages. case *messages.SetCSSDataURLBased: newMsg := &messages.SetCSSData{ ID: m.ID, - Data: e.handleCSS(sessID, m.BaseURL, m.Data), + Data: e.handleCSS(m.SessionID(), m.BaseURL, m.Data), } newMsg.SetMeta(msg.Meta()) return newMsg @@ -53,14 +53,14 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages. newMsg := &messages.CSSInsertRule{ ID: m.ID, Index: m.Index, - Rule: e.handleCSS(sessID, m.BaseURL, m.Rule), + Rule: e.handleCSS(m.SessionID(), m.BaseURL, m.Rule), } newMsg.SetMeta(msg.Meta()) return newMsg case *messages.AdoptedSSReplaceURLBased: newMsg := &messages.AdoptedSSReplace{ SheetID: m.SheetID, - Text: e.handleCSS(sessID, m.BaseURL, m.Text), + Text: e.handleCSS(m.SessionID(), m.BaseURL, m.Text), } newMsg.SetMeta(msg.Meta()) return newMsg @@ -68,7 +68,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages. newMsg := &messages.AdoptedSSInsertRule{ SheetID: m.SheetID, Index: m.Index, - Rule: e.handleCSS(sessID, m.BaseURL, m.Rule), + Rule: e.handleCSS(m.SessionID(), m.BaseURL, m.Rule), } newMsg.SetMeta(msg.Meta()) return newMsg @@ -78,10 +78,11 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages. 
func (e *AssetsCache) sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) { if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable { + assetMessage := &messages.AssetCache{URL: fullURL} if err := e.producer.Produce( e.cfg.TopicCache, sessionID, - messages.Encode(&messages.AssetCache{URL: fullURL}), + assetMessage.Encode(), ); err != nil { log.Printf("can't send asset to cache topic, sessID: %d, err: %s", sessionID, err) } diff --git a/backend/pkg/log/queue.go b/backend/pkg/log/queue.go index ced815bd2..ce1f5f764 100644 --- a/backend/pkg/log/queue.go +++ b/backend/pkg/log/queue.go @@ -5,8 +5,7 @@ import ( "log" "time" - "openreplay/backend/pkg/queue/types" - //"openreplay/backend/pkg/env" + "openreplay/backend/pkg/messages" ) type partitionStats struct { @@ -18,15 +17,15 @@ type partitionStats struct { } // Update partition statistic -func (prt *partitionStats) update(m *types.Meta) { - if prt.maxts < m.Timestamp { - prt.maxts = m.Timestamp +func (prt *partitionStats) update(m *messages.BatchInfo) { + if prt.maxts < m.Timestamp() { + prt.maxts = m.Timestamp() } - if prt.mints > m.Timestamp || prt.mints == 0 { - prt.mints = m.Timestamp + if prt.mints > m.Timestamp() || prt.mints == 0 { + prt.mints = m.Timestamp() } - prt.lastts = m.Timestamp - prt.lastID = m.ID + prt.lastts = m.Timestamp() + prt.lastID = m.ID() prt.count += 1 } @@ -43,14 +42,14 @@ func NewQueueStats(sec int) *queueStats { } // Collect writes new data to partition statistic -func (qs *queueStats) Collect(sessionID uint64, m *types.Meta) { - prti := int32(sessionID % 16) // TODO use GetKeyPartition from kafka/key.go +func (qs *queueStats) Collect(msg messages.Message) { + prti := int32(msg.SessionID() % 16) // TODO use GetKeyPartition from kafka/key.go prt, ok := qs.prts[prti] if !ok { qs.prts[prti] = &partitionStats{} prt = qs.prts[prti] } - prt.update(m) + prt.update(msg.Meta().Batch()) select { case <-qs.tick: diff --git a/backend/pkg/messages/batch.go b/backend/pkg/messages/batch.go deleted file mode 100644 index 887e5ddb3..000000000 --- a/backend/pkg/messages/batch.go +++ /dev/null @@ -1,197 +0,0 @@ -package messages - -import ( - "bytes" - "io" - "log" - "strings" -) - -type Iterator interface { - Next() bool // Return true if we have next message - Type() int // Return type of the next message - Message() Message // Return raw or decoded message - Close() -} - -type iteratorImpl struct { - data *bytes.Reader - index uint64 - timestamp int64 - version uint64 - msgType uint64 - msgSize uint64 - canSkip bool - msg Message - url string -} - -func NewIterator(data []byte) Iterator { - return &iteratorImpl{ - data: bytes.NewReader(data), - } -} - -func (i *iteratorImpl) Next() bool { - if i.canSkip { - if _, err := i.data.Seek(int64(i.msgSize), io.SeekCurrent); err != nil { - log.Printf("seek err: %s", err) - return false - } - } - i.canSkip = false - - var err error - i.msgType, err = ReadUint(i.data) - if err != nil { - if err == io.EOF { - return false - } - log.Printf("can't read message type: %s", err) - return false - } - - if i.version > 0 && messageHasSize(i.msgType) { - // Read message size if it is a new protocol version - i.msgSize, err = ReadSize(i.data) - if err != nil { - log.Printf("can't read message size: %s", err) - return false - } - i.msg = &RawMessage{ - tp: i.msgType, - size: i.msgSize, - meta: &message{}, - reader: i.data, - skipped: &i.canSkip, - } - i.canSkip = true - } else { - i.msg, err = ReadMessage(i.msgType, i.data) - if err == io.EOF { - return 
false - } else if err != nil { - if strings.HasPrefix(err.Error(), "Unknown message code:") { - code := strings.TrimPrefix(err.Error(), "Unknown message code: ") - i.msg, err = DecodeExtraMessage(code, i.data) - if err != nil { - log.Printf("can't decode msg: %s", err) - return false - } - } else { - log.Printf("Batch Message decoding error on message with index %v, err: %s", i.index, err) - return false - } - } - i.msg = transformDeprecated(i.msg) - } - - // Process meta information - isBatchMeta := false - switch i.msgType { - case MsgBatchMetadata: - if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though - log.Printf("Batch Metadata found at the end of the batch") - return false - } - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*BatchMetadata) - i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.timestamp = m.Timestamp - i.version = m.Version - i.url = m.Url - isBatchMeta = true - if i.version > 1 { - log.Printf("incorrect batch version, skip current batch") - return false - } - case MsgBatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it) - if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though - log.Printf("Batch Meta found at the end of the batch") - return false - } - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*BatchMeta) - i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) - i.timestamp = m.Timestamp - isBatchMeta = true - // continue readLoop - case MsgIOSBatchMeta: - if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though - log.Printf("Batch Meta found at the end of the batch") - return false - } - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*IOSBatchMeta) - i.index = m.FirstIndex - i.timestamp = int64(m.Timestamp) - isBatchMeta = true - // continue readLoop - case MsgTimestamp: - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*Timestamp) - i.timestamp = int64(m.Timestamp) - // No skipping here for making it easy to encode back the same sequence of message - // continue readLoop - case MsgSessionStart: - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*SessionStart) - i.timestamp = int64(m.Timestamp) - case MsgSessionEnd: - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*SessionEnd) - i.timestamp = int64(m.Timestamp) - case MsgSetPageLocation: - msg := i.msg.Decode() - if msg == nil { - return false - } - m := msg.(*SetPageLocation) - i.url = m.URL - } - i.msg.Meta().Index = i.index - i.msg.Meta().Timestamp = i.timestamp - i.msg.Meta().Url = i.url - - if !isBatchMeta { // Without that indexes will be unique anyway, though shifted by 1 because BatchMeta is not counted in tracker - i.index++ - } - return true -} - -func (i *iteratorImpl) Type() int { - return int(i.msgType) -} - -func (i *iteratorImpl) Message() Message { - return i.msg -} - -func (i *iteratorImpl) Close() { - _, err := i.data.Seek(0, io.SeekEnd) - if err != nil { - log.Printf("can't set seek pointer at the end: %s", err) - } -} - -func messageHasSize(msgType uint64) bool { - return !(msgType == 80 || msgType == 81 || msgType == 82) -} diff --git a/backend/pkg/messages/extra.go b/backend/pkg/messages/extra.go deleted file mode 100644 index b2a57e2ad..000000000 --- a/backend/pkg/messages/extra.go +++ /dev/null @@ -1,56 +0,0 @@ 
-package messages - -import ( - "encoding/binary" - "fmt" - "io" -) - -type SessionSearch struct { - message - Timestamp uint64 - Partition uint64 -} - -func (msg *SessionSearch) Encode() []byte { - buf := make([]byte, 11) - buf[0] = 127 - p := 1 - p = WriteUint(msg.Timestamp, buf, p) - p = WriteUint(msg.Partition, buf, p) - return buf[:p] -} - -func (msg *SessionSearch) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *SessionSearch) Decode() Message { - return msg -} - -func (msg *SessionSearch) TypeID() int { - return 127 -} - -func DecodeExtraMessage(code string, reader io.Reader) (Message, error) { - var err error - if code != "127" { - return nil, fmt.Errorf("unknown message code: %s", code) - } - msg := &SessionSearch{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, fmt.Errorf("can't read message timestamp: %s", err) - } - if msg.Partition, err = ReadUint(reader); err != nil { - return nil, fmt.Errorf("can't read last partition: %s", err) - } - return msg, nil -} diff --git a/backend/pkg/messages/facade.go b/backend/pkg/messages/facade.go deleted file mode 100644 index ebc9e7983..000000000 --- a/backend/pkg/messages/facade.go +++ /dev/null @@ -1,5 +0,0 @@ -package messages - -func Encode(msg Message) []byte { - return msg.Encode() -} diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index e79d1d987..1918dc397 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -2,7 +2,7 @@ package messages func IsReplayerType(id int) bool { - return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 79 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id + return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 79 == id || 127 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id } func IsIOSType(id int) bool { diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go new file mode 100644 index 000000000..290cb7dc1 --- /dev/null +++ b/backend/pkg/messages/iterator.go @@ -0,0 +1,184 @@ +package messages + +import ( + "bytes" + "fmt" + "io" + "log" +) + +// MessageHandler processes one message using service logic +type MessageHandler func(Message) + +// MessageIterator iterates by all messages in batch +type MessageIterator interface { + Iterate(batchData []byte, batchInfo 
*BatchInfo) +} + +type messageIteratorImpl struct { + filter map[int]struct{} + preFilter map[int]struct{} + handler MessageHandler + autoDecode bool + version uint64 + size uint64 + canSkip bool + messageInfo *message + batchInfo *BatchInfo +} + +func NewMessageIterator(messageHandler MessageHandler, messageFilter []int, autoDecode bool) MessageIterator { + iter := &messageIteratorImpl{handler: messageHandler, autoDecode: autoDecode} + if len(messageFilter) != 0 { + filter := make(map[int]struct{}, len(messageFilter)) + for _, msgType := range messageFilter { + filter[msgType] = struct{}{} + } + iter.filter = filter + } + iter.preFilter = map[int]struct{}{ + MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {}, + MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {}} + return iter +} + +func (i *messageIteratorImpl) prepareVars(batchInfo *BatchInfo) { + i.batchInfo = batchInfo + i.messageInfo = &message{batch: batchInfo} + i.version = 0 + i.canSkip = false + i.size = 0 +} + +func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { + // Prepare iterator before processing messages in batch + i.prepareVars(batchInfo) + + // Initialize batch reader + reader := bytes.NewReader(batchData) + + // Process until end of batch or parsing error + for { + // Increase message index (can be overwritten by batch info message) + i.messageInfo.Index++ + + if i.canSkip { + if _, err := reader.Seek(int64(i.size), io.SeekCurrent); err != nil { + log.Printf("seek err: %s", err) + return + } + } + i.canSkip = false + + // Read message type + msgType, err := ReadUint(reader) + if err != nil { + if err != io.EOF { + log.Printf("can't read message type: %s", err) + } + return + } + + var msg Message + // Read message body (and decode if protocol version less than 1) + if i.version > 0 && messageHasSize(msgType) { + // Read message size if it is a new protocol version + i.size, err = ReadSize(reader) + if err != nil { + log.Printf("can't read message size: %s", err) + return + } + msg = &RawMessage{ + tp: msgType, + size: i.size, + reader: reader, + skipped: &i.canSkip, + meta: i.messageInfo, + } + i.canSkip = true + } else { + msg, err = ReadMessage(msgType, reader) + if err != nil { + if err != io.EOF { + log.Printf("Batch Message decoding error on message with index %v, err: %s", i.messageInfo.Index, err) + } + return + } + msg = transformDeprecated(msg) + } + + // Preprocess "system" messages + if _, ok := i.preFilter[msg.TypeID()]; ok { + msg = msg.Decode() + if msg == nil { + log.Printf("can't decode message") + return + } + if err := i.preprocessing(msg); err != nil { + log.Printf("message preprocessing err: %s", err) + return + } + } + + // Skip messages we don't have in filter + if i.filter != nil { + if _, ok := i.filter[msg.TypeID()]; !ok { + continue + } + } + + if i.autoDecode { + msg = msg.Decode() + if msg == nil { + log.Printf("can't decode message") + return + } + } + + // Set meta information for message + msg.Meta().SetMeta(i.messageInfo) + + // Process message + i.handler(msg) + } +} + +func (i *messageIteratorImpl) preprocessing(msg Message) error { + switch m := msg.(type) { + case *BatchMetadata: + if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though + return fmt.Errorf("batchMetadata found at the end of the batch") + } + if m.Version > 1 { + return fmt.Errorf("incorrect batch version: %d, skip current batch", i.version) + } + i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of 
messages per page (ha-ha) + i.messageInfo.Timestamp = m.Timestamp + i.messageInfo.Url = m.Url + i.version = m.Version + + case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it) + if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though + return fmt.Errorf("batchMeta found at the end of the batch") + } + i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha) + i.messageInfo.Timestamp = m.Timestamp + + case *Timestamp: + i.messageInfo.Timestamp = int64(m.Timestamp) + + case *SessionStart: + i.messageInfo.Timestamp = int64(m.Timestamp) + + case *SessionEnd: + i.messageInfo.Timestamp = int64(m.Timestamp) + + case *SetPageLocation: + i.messageInfo.Url = m.URL + } + return nil +} + +func messageHasSize(msgType uint64) bool { + return !(msgType == 80 || msgType == 81 || msgType == 82) +} diff --git a/backend/pkg/messages/message.go b/backend/pkg/messages/message.go index 16ab1920d..ad21dbf6a 100644 --- a/backend/pkg/messages/message.go +++ b/backend/pkg/messages/message.go @@ -1,25 +1,72 @@ package messages -type message struct { - Timestamp int64 - Index uint64 - Url string -} - -func (m *message) Meta() *message { - return m -} - -func (m *message) SetMeta(origin *message) { - m.Timestamp = origin.Timestamp - m.Index = origin.Index - m.Url = origin.Url -} - type Message interface { Encode() []byte EncodeWithIndex() []byte Decode() Message TypeID() int Meta() *message + SessionID() uint64 +} + +// BatchInfo represents common information for all messages inside data batch +type BatchInfo struct { + sessionID uint64 + id uint64 + topic string + timestamp int64 +} + +func NewBatchInfo(sessID uint64, topic string, id uint64, ts int64) *BatchInfo { + return &BatchInfo{ + sessionID: sessID, + id: id, + topic: topic, + timestamp: ts, + } +} + +func (b *BatchInfo) SessionID() uint64 { + return b.sessionID +} + +func (b *BatchInfo) ID() uint64 { + return b.id +} + +func (b *BatchInfo) Timestamp() int64 { + return b.timestamp +} + +type message struct { + Timestamp int64 + Index uint64 + Url string + batch *BatchInfo +} + +func (m *message) Batch() *BatchInfo { + return m.batch +} + +func (m *message) Meta() *message { + return m +} + +func (m *message) SetMeta(origin *message) { + m.batch = origin.batch + m.Timestamp = origin.Timestamp + m.Index = origin.Index + m.Url = origin.Url +} + +func (m *message) SessionID() uint64 { + return m.batch.sessionID +} + +func (m *message) SetSessionID(sessID uint64) { + if m.batch == nil { + m.batch = &BatchInfo{} + } + m.batch.sessionID = sessID } diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 8cdb95722..6bbd2eb97 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -4,204 +4,209 @@ package messages import "encoding/binary" const ( - MsgBatchMeta = 80 - MsgBatchMetadata = 81 + MsgBatchMeta = 80 - MsgPartitionedMessage = 82 + MsgBatchMetadata = 81 - MsgTimestamp = 0 + MsgPartitionedMessage = 82 - MsgSessionStart = 1 + MsgTimestamp = 0 - MsgSessionEnd = 3 + MsgSessionStart = 1 - MsgSetPageLocation = 4 + MsgSessionEnd = 3 - MsgSetViewportSize = 5 + MsgSetPageLocation = 4 - MsgSetViewportScroll = 6 + MsgSetViewportSize = 5 - MsgCreateDocument = 7 + MsgSetViewportScroll = 6 - MsgCreateElementNode = 8 + MsgCreateDocument = 7 - MsgCreateTextNode = 9 + MsgCreateElementNode = 8 - MsgMoveNode = 10 + MsgCreateTextNode = 9 - MsgRemoveNode = 11 + MsgMoveNode 
= 10 - MsgSetNodeAttribute = 12 + MsgRemoveNode = 11 - MsgRemoveNodeAttribute = 13 + MsgSetNodeAttribute = 12 - MsgSetNodeData = 14 + MsgRemoveNodeAttribute = 13 - MsgSetCSSData = 15 + MsgSetNodeData = 14 - MsgSetNodeScroll = 16 + MsgSetCSSData = 15 - MsgSetInputTarget = 17 + MsgSetNodeScroll = 16 - MsgSetInputValue = 18 + MsgSetInputTarget = 17 - MsgSetInputChecked = 19 + MsgSetInputValue = 18 - MsgMouseMove = 20 + MsgSetInputChecked = 19 - MsgMouseClickDepricated = 21 + MsgMouseMove = 20 - MsgConsoleLog = 22 + MsgMouseClickDepricated = 21 - MsgPageLoadTiming = 23 + MsgConsoleLog = 22 - MsgPageRenderTiming = 24 + MsgPageLoadTiming = 23 - MsgJSException = 25 + MsgPageRenderTiming = 24 - MsgIntegrationEvent = 26 + MsgJSException = 25 - MsgRawCustomEvent = 27 + MsgIntegrationEvent = 26 - MsgUserID = 28 + MsgRawCustomEvent = 27 - MsgUserAnonymousID = 29 + MsgUserID = 28 - MsgMetadata = 30 + MsgUserAnonymousID = 29 - MsgPageEvent = 31 + MsgMetadata = 30 - MsgInputEvent = 32 + MsgPageEvent = 31 - MsgClickEvent = 33 + MsgInputEvent = 32 - MsgErrorEvent = 34 + MsgClickEvent = 33 - MsgResourceEvent = 35 + MsgErrorEvent = 34 - MsgCustomEvent = 36 + MsgResourceEvent = 35 - MsgCSSInsertRule = 37 + MsgCustomEvent = 36 - MsgCSSDeleteRule = 38 + MsgCSSInsertRule = 37 - MsgFetch = 39 + MsgCSSDeleteRule = 38 - MsgProfiler = 40 + MsgFetch = 39 - MsgOTable = 41 + MsgProfiler = 40 - MsgStateAction = 42 + MsgOTable = 41 - MsgStateActionEvent = 43 + MsgStateAction = 42 - MsgRedux = 44 + MsgStateActionEvent = 43 - MsgVuex = 45 + MsgRedux = 44 - MsgMobX = 46 + MsgVuex = 45 - MsgNgRx = 47 + MsgMobX = 46 - MsgGraphQL = 48 + MsgNgRx = 47 - MsgPerformanceTrack = 49 + MsgGraphQL = 48 - MsgGraphQLEvent = 50 + MsgPerformanceTrack = 49 - MsgFetchEvent = 51 + MsgGraphQLEvent = 50 - MsgDOMDrop = 52 + MsgFetchEvent = 51 - MsgResourceTiming = 53 + MsgDOMDrop = 52 - MsgConnectionInformation = 54 + MsgResourceTiming = 53 - MsgSetPageVisibility = 55 + MsgConnectionInformation = 54 - MsgPerformanceTrackAggr = 56 + MsgSetPageVisibility = 55 - MsgLongTask = 59 + MsgPerformanceTrackAggr = 56 - MsgSetNodeAttributeURLBased = 60 + MsgLongTask = 59 - MsgSetCSSDataURLBased = 61 + MsgSetNodeAttributeURLBased = 60 - MsgIssueEvent = 62 + MsgSetCSSDataURLBased = 61 - MsgTechnicalInfo = 63 + MsgIssueEvent = 62 - MsgCustomIssue = 64 + MsgTechnicalInfo = 63 - MsgAssetCache = 66 + MsgCustomIssue = 64 - MsgCSSInsertRuleURLBased = 67 + MsgAssetCache = 66 - MsgMouseClick = 69 + MsgCSSInsertRuleURLBased = 67 - MsgCreateIFrameDocument = 70 + MsgMouseClick = 69 - MsgAdoptedSSReplaceURLBased = 71 + MsgCreateIFrameDocument = 70 - MsgAdoptedSSReplace = 72 + MsgAdoptedSSReplaceURLBased = 71 - MsgAdoptedSSInsertRuleURLBased = 73 + MsgAdoptedSSReplace = 72 - MsgAdoptedSSInsertRule = 74 + MsgAdoptedSSInsertRuleURLBased = 73 - MsgAdoptedSSDeleteRule = 75 + MsgAdoptedSSInsertRule = 74 - MsgAdoptedSSAddOwner = 76 + MsgAdoptedSSDeleteRule = 75 - MsgAdoptedSSRemoveOwner = 77 + MsgAdoptedSSAddOwner = 76 - MsgZustand = 79 + MsgAdoptedSSRemoveOwner = 77 - MsgIOSBatchMeta = 107 + MsgZustand = 79 - MsgIOSSessionStart = 90 + MsgSessionSearch = 127 - MsgIOSSessionEnd = 91 + MsgIOSBatchMeta = 107 - MsgIOSMetadata = 92 + MsgIOSSessionStart = 90 - MsgIOSCustomEvent = 93 + MsgIOSSessionEnd = 91 - MsgIOSUserID = 94 + MsgIOSMetadata = 92 - MsgIOSUserAnonymousID = 95 + MsgIOSCustomEvent = 93 - MsgIOSScreenChanges = 96 + MsgIOSUserID = 94 - MsgIOSCrash = 97 + MsgIOSUserAnonymousID = 95 - MsgIOSScreenEnter = 98 + MsgIOSScreenChanges = 96 - MsgIOSScreenLeave = 99 + MsgIOSCrash 
= 97 - MsgIOSClickEvent = 100 + MsgIOSScreenEnter = 98 - MsgIOSInputEvent = 101 + MsgIOSScreenLeave = 99 - MsgIOSPerformanceEvent = 102 + MsgIOSClickEvent = 100 - MsgIOSLog = 103 + MsgIOSInputEvent = 101 - MsgIOSInternalError = 104 + MsgIOSPerformanceEvent = 102 - MsgIOSNetworkCall = 105 + MsgIOSLog = 103 - MsgIOSPerformanceAggregated = 110 + MsgIOSInternalError = 104 + + MsgIOSNetworkCall = 105 + + MsgIOSPerformanceAggregated = 110 + + MsgIOSIssueEvent = 111 - MsgIOSIssueEvent = 111 ) + type BatchMeta struct { message - PageNo uint64 + PageNo uint64 FirstIndex uint64 - Timestamp int64 + Timestamp int64 } func (msg *BatchMeta) Encode() []byte { @@ -215,14 +220,14 @@ func (msg *BatchMeta) Encode() []byte { } func (msg *BatchMeta) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *BatchMeta) Decode() Message { @@ -235,11 +240,11 @@ func (msg *BatchMeta) TypeID() int { type BatchMetadata struct { message - Version uint64 - PageNo uint64 + Version uint64 + PageNo uint64 FirstIndex uint64 - Timestamp int64 - Location string + Timestamp int64 + Location string } func (msg *BatchMetadata) Encode() []byte { @@ -255,14 +260,14 @@ func (msg *BatchMetadata) Encode() []byte { } func (msg *BatchMetadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *BatchMetadata) Decode() Message { @@ -275,7 +280,7 @@ func (msg *BatchMetadata) TypeID() int { type PartitionedMessage struct { message - PartNo uint64 + PartNo uint64 PartTotal uint64 } @@ -289,14 +294,14 @@ func (msg *PartitionedMessage) Encode() []byte { } func (msg *PartitionedMessage) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PartitionedMessage) Decode() Message { @@ -321,14 +326,14 @@ func (msg *Timestamp) Encode() []byte { } func (msg *Timestamp) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Timestamp) Decode() Message { @@ -341,22 +346,22 @@ func 
(msg *Timestamp) TypeID() int { type SessionStart struct { message - Timestamp uint64 - ProjectID uint64 - TrackerVersion string - RevID string - UserUUID string - UserAgent string - UserOS string - UserOSVersion string - UserBrowser string - UserBrowserVersion string - UserDevice string - UserDeviceType string + Timestamp uint64 + ProjectID uint64 + TrackerVersion string + RevID string + UserUUID string + UserAgent string + UserOS string + UserOSVersion string + UserBrowser string + UserBrowserVersion string + UserDevice string + UserDeviceType string UserDeviceMemorySize uint64 - UserDeviceHeapSize uint64 - UserCountry string - UserID string + UserDeviceHeapSize uint64 + UserCountry string + UserID string } func (msg *SessionStart) Encode() []byte { @@ -383,14 +388,14 @@ func (msg *SessionStart) Encode() []byte { } func (msg *SessionStart) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SessionStart) Decode() Message { @@ -415,14 +420,14 @@ func (msg *SessionEnd) Encode() []byte { } func (msg *SessionEnd) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SessionEnd) Decode() Message { @@ -435,8 +440,8 @@ func (msg *SessionEnd) TypeID() int { type SetPageLocation struct { message - URL string - Referrer string + URL string + Referrer string NavigationStart uint64 } @@ -451,14 +456,14 @@ func (msg *SetPageLocation) Encode() []byte { } func (msg *SetPageLocation) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetPageLocation) Decode() Message { @@ -471,7 +476,7 @@ func (msg *SetPageLocation) TypeID() int { type SetViewportSize struct { message - Width uint64 + Width uint64 Height uint64 } @@ -485,14 +490,14 @@ func (msg *SetViewportSize) Encode() []byte { } func (msg *SetViewportSize) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetViewportSize) Decode() Message { @@ -519,14 +524,14 @@ func 
(msg *SetViewportScroll) Encode() []byte { } func (msg *SetViewportScroll) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetViewportScroll) Decode() Message { @@ -539,6 +544,7 @@ func (msg *SetViewportScroll) TypeID() int { type CreateDocument struct { message + } func (msg *CreateDocument) Encode() []byte { @@ -550,14 +556,14 @@ func (msg *CreateDocument) Encode() []byte { } func (msg *CreateDocument) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CreateDocument) Decode() Message { @@ -570,11 +576,11 @@ func (msg *CreateDocument) TypeID() int { type CreateElementNode struct { message - ID uint64 + ID uint64 ParentID uint64 - index uint64 - Tag string - SVG bool + index uint64 + Tag string + SVG bool } func (msg *CreateElementNode) Encode() []byte { @@ -590,14 +596,14 @@ func (msg *CreateElementNode) Encode() []byte { } func (msg *CreateElementNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CreateElementNode) Decode() Message { @@ -610,9 +616,9 @@ func (msg *CreateElementNode) TypeID() int { type CreateTextNode struct { message - ID uint64 + ID uint64 ParentID uint64 - Index uint64 + Index uint64 } func (msg *CreateTextNode) Encode() []byte { @@ -626,14 +632,14 @@ func (msg *CreateTextNode) Encode() []byte { } func (msg *CreateTextNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CreateTextNode) Decode() Message { @@ -646,9 +652,9 @@ func (msg *CreateTextNode) TypeID() int { type MoveNode struct { message - ID uint64 + ID uint64 ParentID uint64 - Index uint64 + Index uint64 } func (msg *MoveNode) Encode() []byte { @@ -662,14 +668,14 @@ func (msg *MoveNode) Encode() []byte { } func (msg *MoveNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - 
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *MoveNode) Decode() Message { @@ -694,14 +700,14 @@ func (msg *RemoveNode) Encode() []byte { } func (msg *RemoveNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *RemoveNode) Decode() Message { @@ -714,8 +720,8 @@ func (msg *RemoveNode) TypeID() int { type SetNodeAttribute struct { message - ID uint64 - Name string + ID uint64 + Name string Value string } @@ -730,14 +736,14 @@ func (msg *SetNodeAttribute) Encode() []byte { } func (msg *SetNodeAttribute) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetNodeAttribute) Decode() Message { @@ -750,7 +756,7 @@ func (msg *SetNodeAttribute) TypeID() int { type RemoveNodeAttribute struct { message - ID uint64 + ID uint64 Name string } @@ -764,14 +770,14 @@ func (msg *RemoveNodeAttribute) Encode() []byte { } func (msg *RemoveNodeAttribute) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *RemoveNodeAttribute) Decode() Message { @@ -784,7 +790,7 @@ func (msg *RemoveNodeAttribute) TypeID() int { type SetNodeData struct { message - ID uint64 + ID uint64 Data string } @@ -798,14 +804,14 @@ func (msg *SetNodeData) Encode() []byte { } func (msg *SetNodeData) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetNodeData) Decode() Message { @@ -818,7 +824,7 @@ func (msg *SetNodeData) TypeID() int { type SetCSSData struct { message - ID uint64 + ID uint64 Data string } @@ -832,14 +838,14 @@ func (msg *SetCSSData) Encode() []byte { } func (msg *SetCSSData) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data 
:= make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetCSSData) Decode() Message { @@ -853,8 +859,8 @@ func (msg *SetCSSData) TypeID() int { type SetNodeScroll struct { message ID uint64 - X int64 - Y int64 + X int64 + Y int64 } func (msg *SetNodeScroll) Encode() []byte { @@ -868,14 +874,14 @@ func (msg *SetNodeScroll) Encode() []byte { } func (msg *SetNodeScroll) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetNodeScroll) Decode() Message { @@ -888,7 +894,7 @@ func (msg *SetNodeScroll) TypeID() int { type SetInputTarget struct { message - ID uint64 + ID uint64 Label string } @@ -902,14 +908,14 @@ func (msg *SetInputTarget) Encode() []byte { } func (msg *SetInputTarget) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetInputTarget) Decode() Message { @@ -922,9 +928,9 @@ func (msg *SetInputTarget) TypeID() int { type SetInputValue struct { message - ID uint64 + ID uint64 Value string - Mask int64 + Mask int64 } func (msg *SetInputValue) Encode() []byte { @@ -938,14 +944,14 @@ func (msg *SetInputValue) Encode() []byte { } func (msg *SetInputValue) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetInputValue) Decode() Message { @@ -958,7 +964,7 @@ func (msg *SetInputValue) TypeID() int { type SetInputChecked struct { message - ID uint64 + ID uint64 Checked bool } @@ -972,14 +978,14 @@ func (msg *SetInputChecked) Encode() []byte { } func (msg *SetInputChecked) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetInputChecked) Decode() Message { @@ -1006,14 +1012,14 @@ func (msg 
*MouseMove) Encode() []byte { } func (msg *MouseMove) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *MouseMove) Decode() Message { @@ -1026,9 +1032,9 @@ func (msg *MouseMove) TypeID() int { type MouseClickDepricated struct { message - ID uint64 + ID uint64 HesitationTime uint64 - Label string + Label string } func (msg *MouseClickDepricated) Encode() []byte { @@ -1042,14 +1048,14 @@ func (msg *MouseClickDepricated) Encode() []byte { } func (msg *MouseClickDepricated) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *MouseClickDepricated) Decode() Message { @@ -1076,14 +1082,14 @@ func (msg *ConsoleLog) Encode() []byte { } func (msg *ConsoleLog) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ConsoleLog) Decode() Message { @@ -1096,15 +1102,15 @@ func (msg *ConsoleLog) TypeID() int { type PageLoadTiming struct { message - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 + RequestStart uint64 + ResponseStart uint64 + ResponseEnd uint64 DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 } func (msg *PageLoadTiming) Encode() []byte { @@ -1124,14 +1130,14 @@ func (msg *PageLoadTiming) Encode() []byte { } func (msg *PageLoadTiming) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PageLoadTiming) Decode() Message { @@ -1144,8 +1150,8 @@ func (msg *PageLoadTiming) TypeID() int { type PageRenderTiming struct { message - SpeedIndex uint64 - VisuallyComplete uint64 + SpeedIndex uint64 + VisuallyComplete uint64 TimeToInteractive uint64 } @@ -1160,14 +1166,14 @@ func (msg *PageRenderTiming) Encode() []byte { } func (msg *PageRenderTiming) EncodeWithIndex() 
[]byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PageRenderTiming) Decode() Message { @@ -1180,7 +1186,7 @@ func (msg *PageRenderTiming) TypeID() int { type JSException struct { message - Name string + Name string Message string Payload string } @@ -1196,14 +1202,14 @@ func (msg *JSException) Encode() []byte { } func (msg *JSException) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *JSException) Decode() Message { @@ -1217,10 +1223,10 @@ func (msg *JSException) TypeID() int { type IntegrationEvent struct { message Timestamp uint64 - Source string - Name string - Message string - Payload string + Source string + Name string + Message string + Payload string } func (msg *IntegrationEvent) Encode() []byte { @@ -1236,14 +1242,14 @@ func (msg *IntegrationEvent) Encode() []byte { } func (msg *IntegrationEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IntegrationEvent) Decode() Message { @@ -1256,7 +1262,7 @@ func (msg *IntegrationEvent) TypeID() int { type RawCustomEvent struct { message - Name string + Name string Payload string } @@ -1270,14 +1276,14 @@ func (msg *RawCustomEvent) Encode() []byte { } func (msg *RawCustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *RawCustomEvent) Decode() Message { @@ -1302,14 +1308,14 @@ func (msg *UserID) Encode() []byte { } func (msg *UserID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *UserID) Decode() Message { @@ -1334,14 +1340,14 @@ 
func (msg *UserAnonymousID) Encode() []byte { } func (msg *UserAnonymousID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *UserAnonymousID) Decode() Message { @@ -1354,7 +1360,7 @@ func (msg *UserAnonymousID) TypeID() int { type Metadata struct { message - Key string + Key string Value string } @@ -1368,14 +1374,14 @@ func (msg *Metadata) Encode() []byte { } func (msg *Metadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Metadata) Decode() Message { @@ -1388,23 +1394,23 @@ func (msg *Metadata) TypeID() int { type PageEvent struct { message - MessageID uint64 - Timestamp uint64 - URL string - Referrer string - Loaded bool - RequestStart uint64 - ResponseStart uint64 - ResponseEnd uint64 + MessageID uint64 + Timestamp uint64 + URL string + Referrer string + Loaded bool + RequestStart uint64 + ResponseStart uint64 + ResponseEnd uint64 DomContentLoadedEventStart uint64 - DomContentLoadedEventEnd uint64 - LoadEventStart uint64 - LoadEventEnd uint64 - FirstPaint uint64 - FirstContentfulPaint uint64 - SpeedIndex uint64 - VisuallyComplete uint64 - TimeToInteractive uint64 + DomContentLoadedEventEnd uint64 + LoadEventStart uint64 + LoadEventEnd uint64 + FirstPaint uint64 + FirstContentfulPaint uint64 + SpeedIndex uint64 + VisuallyComplete uint64 + TimeToInteractive uint64 } func (msg *PageEvent) Encode() []byte { @@ -1432,14 +1438,14 @@ func (msg *PageEvent) Encode() []byte { } func (msg *PageEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PageEvent) Decode() Message { @@ -1452,11 +1458,11 @@ func (msg *PageEvent) TypeID() int { type InputEvent struct { message - MessageID uint64 - Timestamp uint64 - Value string + MessageID uint64 + Timestamp uint64 + Value string ValueMasked bool - Label string + Label string } func (msg *InputEvent) Encode() []byte { @@ -1472,14 +1478,14 @@ func (msg *InputEvent) Encode() []byte { } func (msg *InputEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], 
encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *InputEvent) Decode() Message { @@ -1492,11 +1498,11 @@ func (msg *InputEvent) TypeID() int { type ClickEvent struct { message - MessageID uint64 - Timestamp uint64 + MessageID uint64 + Timestamp uint64 HesitationTime uint64 - Label string - Selector string + Label string + Selector string } func (msg *ClickEvent) Encode() []byte { @@ -1512,14 +1518,14 @@ func (msg *ClickEvent) Encode() []byte { } func (msg *ClickEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ClickEvent) Decode() Message { @@ -1534,10 +1540,10 @@ type ErrorEvent struct { message MessageID uint64 Timestamp uint64 - Source string - Name string - Message string - Payload string + Source string + Name string + Message string + Payload string } func (msg *ErrorEvent) Encode() []byte { @@ -1554,14 +1560,14 @@ func (msg *ErrorEvent) Encode() []byte { } func (msg *ErrorEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ErrorEvent) Decode() Message { @@ -1574,18 +1580,18 @@ func (msg *ErrorEvent) TypeID() int { type ResourceEvent struct { message - MessageID uint64 - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 + MessageID uint64 + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 EncodedBodySize uint64 DecodedBodySize uint64 - URL string - Type string - Success bool - Method string - Status uint64 + URL string + Type string + Success bool + Method string + Status uint64 } func (msg *ResourceEvent) Encode() []byte { @@ -1608,14 +1614,14 @@ func (msg *ResourceEvent) Encode() []byte { } func (msg *ResourceEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ResourceEvent) Decode() Message { @@ -1630,8 +1636,8 @@ type CustomEvent struct { message MessageID uint64 Timestamp uint64 - Name string - Payload string + Name string + Payload string } func (msg *CustomEvent) Encode() []byte { @@ -1646,14 +1652,14 @@ func (msg *CustomEvent) Encode() []byte { } func (msg *CustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return 
data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CustomEvent) Decode() Message { @@ -1666,8 +1672,8 @@ func (msg *CustomEvent) TypeID() int { type CSSInsertRule struct { message - ID uint64 - Rule string + ID uint64 + Rule string Index uint64 } @@ -1682,14 +1688,14 @@ func (msg *CSSInsertRule) Encode() []byte { } func (msg *CSSInsertRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CSSInsertRule) Decode() Message { @@ -1702,7 +1708,7 @@ func (msg *CSSInsertRule) TypeID() int { type CSSDeleteRule struct { message - ID uint64 + ID uint64 Index uint64 } @@ -1716,14 +1722,14 @@ func (msg *CSSDeleteRule) Encode() []byte { } func (msg *CSSDeleteRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CSSDeleteRule) Decode() Message { @@ -1736,13 +1742,13 @@ func (msg *CSSDeleteRule) TypeID() int { type Fetch struct { message - Method string - URL string - Request string - Response string - Status uint64 + Method string + URL string + Request string + Response string + Status uint64 Timestamp uint64 - Duration uint64 + Duration uint64 } func (msg *Fetch) Encode() []byte { @@ -1760,14 +1766,14 @@ func (msg *Fetch) Encode() []byte { } func (msg *Fetch) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Fetch) Decode() Message { @@ -1780,10 +1786,10 @@ func (msg *Fetch) TypeID() int { type Profiler struct { message - Name string + Name string Duration uint64 - Args string - Result string + Args string + Result string } func (msg *Profiler) Encode() []byte { @@ -1798,14 +1804,14 @@ func (msg *Profiler) Encode() []byte { } func (msg *Profiler) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Profiler) Decode() Message { @@ 
-1818,7 +1824,7 @@ func (msg *Profiler) TypeID() int { type OTable struct { message - Key string + Key string Value string } @@ -1832,14 +1838,14 @@ func (msg *OTable) Encode() []byte { } func (msg *OTable) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *OTable) Decode() Message { @@ -1864,14 +1870,14 @@ func (msg *StateAction) Encode() []byte { } func (msg *StateAction) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *StateAction) Decode() Message { @@ -1886,7 +1892,7 @@ type StateActionEvent struct { message MessageID uint64 Timestamp uint64 - Type string + Type string } func (msg *StateActionEvent) Encode() []byte { @@ -1900,14 +1906,14 @@ func (msg *StateActionEvent) Encode() []byte { } func (msg *StateActionEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *StateActionEvent) Decode() Message { @@ -1920,8 +1926,8 @@ func (msg *StateActionEvent) TypeID() int { type Redux struct { message - Action string - State string + Action string + State string Duration uint64 } @@ -1936,14 +1942,14 @@ func (msg *Redux) Encode() []byte { } func (msg *Redux) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Redux) Decode() Message { @@ -1957,7 +1963,7 @@ func (msg *Redux) TypeID() int { type Vuex struct { message Mutation string - State string + State string } func (msg *Vuex) Encode() []byte { @@ -1970,14 +1976,14 @@ func (msg *Vuex) Encode() []byte { } func (msg *Vuex) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], 
msg.Meta().Index) + return data } func (msg *Vuex) Decode() Message { @@ -1990,7 +1996,7 @@ func (msg *Vuex) TypeID() int { type MobX struct { message - Type string + Type string Payload string } @@ -2004,14 +2010,14 @@ func (msg *MobX) Encode() []byte { } func (msg *MobX) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *MobX) Decode() Message { @@ -2024,8 +2030,8 @@ func (msg *MobX) TypeID() int { type NgRx struct { message - Action string - State string + Action string + State string Duration uint64 } @@ -2040,14 +2046,14 @@ func (msg *NgRx) Encode() []byte { } func (msg *NgRx) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *NgRx) Decode() Message { @@ -2062,8 +2068,8 @@ type GraphQL struct { message OperationKind string OperationName string - Variables string - Response string + Variables string + Response string } func (msg *GraphQL) Encode() []byte { @@ -2078,14 +2084,14 @@ func (msg *GraphQL) Encode() []byte { } func (msg *GraphQL) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *GraphQL) Decode() Message { @@ -2098,10 +2104,10 @@ func (msg *GraphQL) TypeID() int { type PerformanceTrack struct { message - Frames int64 - Ticks int64 + Frames int64 + Ticks int64 TotalJSHeapSize uint64 - UsedJSHeapSize uint64 + UsedJSHeapSize uint64 } func (msg *PerformanceTrack) Encode() []byte { @@ -2116,14 +2122,14 @@ func (msg *PerformanceTrack) Encode() []byte { } func (msg *PerformanceTrack) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PerformanceTrack) Decode() Message { @@ -2136,12 +2142,12 @@ func (msg *PerformanceTrack) TypeID() int { type GraphQLEvent struct { message - MessageID uint64 - Timestamp uint64 + MessageID uint64 + Timestamp uint64 OperationKind string OperationName string - Variables string - Response string + Variables string + Response string } func (msg *GraphQLEvent) Encode() 
[]byte { @@ -2158,14 +2164,14 @@ func (msg *GraphQLEvent) Encode() []byte { } func (msg *GraphQLEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *GraphQLEvent) Decode() Message { @@ -2180,12 +2186,12 @@ type FetchEvent struct { message MessageID uint64 Timestamp uint64 - Method string - URL string - Request string - Response string - Status uint64 - Duration uint64 + Method string + URL string + Request string + Response string + Status uint64 + Duration uint64 } func (msg *FetchEvent) Encode() []byte { @@ -2204,14 +2210,14 @@ func (msg *FetchEvent) Encode() []byte { } func (msg *FetchEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *FetchEvent) Decode() Message { @@ -2236,14 +2242,14 @@ func (msg *DOMDrop) Encode() []byte { } func (msg *DOMDrop) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *DOMDrop) Decode() Message { @@ -2256,14 +2262,14 @@ func (msg *DOMDrop) TypeID() int { type ResourceTiming struct { message - Timestamp uint64 - Duration uint64 - TTFB uint64 - HeaderSize uint64 + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 EncodedBodySize uint64 DecodedBodySize uint64 - URL string - Initiator string + URL string + Initiator string } func (msg *ResourceTiming) Encode() []byte { @@ -2282,14 +2288,14 @@ func (msg *ResourceTiming) Encode() []byte { } func (msg *ResourceTiming) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ResourceTiming) Decode() Message { @@ -2303,7 +2309,7 @@ func (msg *ResourceTiming) TypeID() int { type ConnectionInformation struct { message Downlink uint64 - Type string + Type string } func (msg *ConnectionInformation) Encode() []byte { @@ -2316,14 +2322,14 @@ func (msg *ConnectionInformation) Encode() []byte { } func (msg *ConnectionInformation) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - 
return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *ConnectionInformation) Decode() Message { @@ -2348,14 +2354,14 @@ func (msg *SetPageVisibility) Encode() []byte { } func (msg *SetPageVisibility) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetPageVisibility) Decode() Message { @@ -2368,20 +2374,20 @@ func (msg *SetPageVisibility) TypeID() int { type PerformanceTrackAggr struct { message - TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 + TimestampStart uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 MinTotalJSHeapSize uint64 AvgTotalJSHeapSize uint64 MaxTotalJSHeapSize uint64 - MinUsedJSHeapSize uint64 - AvgUsedJSHeapSize uint64 - MaxUsedJSHeapSize uint64 + MinUsedJSHeapSize uint64 + AvgUsedJSHeapSize uint64 + MaxUsedJSHeapSize uint64 } func (msg *PerformanceTrackAggr) Encode() []byte { @@ -2406,14 +2412,14 @@ func (msg *PerformanceTrackAggr) Encode() []byte { } func (msg *PerformanceTrackAggr) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *PerformanceTrackAggr) Decode() Message { @@ -2426,12 +2432,12 @@ func (msg *PerformanceTrackAggr) TypeID() int { type LongTask struct { message - Timestamp uint64 - Duration uint64 - Context uint64 + Timestamp uint64 + Duration uint64 + Context uint64 ContainerType uint64 - ContainerSrc string - ContainerId string + ContainerSrc string + ContainerId string ContainerName string } @@ -2450,14 +2456,14 @@ func (msg *LongTask) Encode() []byte { } func (msg *LongTask) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *LongTask) Decode() Message { @@ -2470,9 +2476,9 @@ func (msg *LongTask) TypeID() int { type SetNodeAttributeURLBased struct { message - ID uint64 - Name string - Value string + ID uint64 + Name string + Value string BaseURL string } @@ -2488,14 +2494,14 @@ func (msg 
*SetNodeAttributeURLBased) Encode() []byte { } func (msg *SetNodeAttributeURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetNodeAttributeURLBased) Decode() Message { @@ -2508,8 +2514,8 @@ func (msg *SetNodeAttributeURLBased) TypeID() int { type SetCSSDataURLBased struct { message - ID uint64 - Data string + ID uint64 + Data string BaseURL string } @@ -2524,14 +2530,14 @@ func (msg *SetCSSDataURLBased) Encode() []byte { } func (msg *SetCSSDataURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *SetCSSDataURLBased) Decode() Message { @@ -2544,12 +2550,12 @@ func (msg *SetCSSDataURLBased) TypeID() int { type IssueEvent struct { message - MessageID uint64 - Timestamp uint64 - Type string + MessageID uint64 + Timestamp uint64 + Type string ContextString string - Context string - Payload string + Context string + Payload string } func (msg *IssueEvent) Encode() []byte { @@ -2566,14 +2572,14 @@ func (msg *IssueEvent) Encode() []byte { } func (msg *IssueEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IssueEvent) Decode() Message { @@ -2586,7 +2592,7 @@ func (msg *IssueEvent) TypeID() int { type TechnicalInfo struct { message - Type string + Type string Value string } @@ -2600,14 +2606,14 @@ func (msg *TechnicalInfo) Encode() []byte { } func (msg *TechnicalInfo) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *TechnicalInfo) Decode() Message { @@ -2620,7 +2626,7 @@ func (msg *TechnicalInfo) TypeID() int { type CustomIssue struct { message - Name string + Name string Payload string } @@ -2634,14 +2640,14 @@ func (msg *CustomIssue) Encode() []byte { } func (msg *CustomIssue) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - 
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CustomIssue) Decode() Message { @@ -2666,14 +2672,14 @@ func (msg *AssetCache) Encode() []byte { } func (msg *AssetCache) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AssetCache) Decode() Message { @@ -2686,9 +2692,9 @@ func (msg *AssetCache) TypeID() int { type CSSInsertRuleURLBased struct { message - ID uint64 - Rule string - Index uint64 + ID uint64 + Rule string + Index uint64 BaseURL string } @@ -2704,14 +2710,14 @@ func (msg *CSSInsertRuleURLBased) Encode() []byte { } func (msg *CSSInsertRuleURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CSSInsertRuleURLBased) Decode() Message { @@ -2724,10 +2730,10 @@ func (msg *CSSInsertRuleURLBased) TypeID() int { type MouseClick struct { message - ID uint64 + ID uint64 HesitationTime uint64 - Label string - Selector string + Label string + Selector string } func (msg *MouseClick) Encode() []byte { @@ -2742,14 +2748,14 @@ func (msg *MouseClick) Encode() []byte { } func (msg *MouseClick) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *MouseClick) Decode() Message { @@ -2763,7 +2769,7 @@ func (msg *MouseClick) TypeID() int { type CreateIFrameDocument struct { message FrameID uint64 - ID uint64 + ID uint64 } func (msg *CreateIFrameDocument) Encode() []byte { @@ -2776,14 +2782,14 @@ func (msg *CreateIFrameDocument) Encode() []byte { } func (msg *CreateIFrameDocument) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *CreateIFrameDocument) Decode() Message { @@ -2797,7 +2803,7 @@ func (msg *CreateIFrameDocument) TypeID() int { type 
AdoptedSSReplaceURLBased struct { message SheetID uint64 - Text string + Text string BaseURL string } @@ -2812,14 +2818,14 @@ func (msg *AdoptedSSReplaceURLBased) Encode() []byte { } func (msg *AdoptedSSReplaceURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSReplaceURLBased) Decode() Message { @@ -2833,7 +2839,7 @@ func (msg *AdoptedSSReplaceURLBased) TypeID() int { type AdoptedSSReplace struct { message SheetID uint64 - Text string + Text string } func (msg *AdoptedSSReplace) Encode() []byte { @@ -2846,14 +2852,14 @@ func (msg *AdoptedSSReplace) Encode() []byte { } func (msg *AdoptedSSReplace) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSReplace) Decode() Message { @@ -2867,8 +2873,8 @@ func (msg *AdoptedSSReplace) TypeID() int { type AdoptedSSInsertRuleURLBased struct { message SheetID uint64 - Rule string - Index uint64 + Rule string + Index uint64 BaseURL string } @@ -2884,14 +2890,14 @@ func (msg *AdoptedSSInsertRuleURLBased) Encode() []byte { } func (msg *AdoptedSSInsertRuleURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSInsertRuleURLBased) Decode() Message { @@ -2905,8 +2911,8 @@ func (msg *AdoptedSSInsertRuleURLBased) TypeID() int { type AdoptedSSInsertRule struct { message SheetID uint64 - Rule string - Index uint64 + Rule string + Index uint64 } func (msg *AdoptedSSInsertRule) Encode() []byte { @@ -2920,14 +2926,14 @@ func (msg *AdoptedSSInsertRule) Encode() []byte { } func (msg *AdoptedSSInsertRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSInsertRule) Decode() Message { @@ -2941,7 +2947,7 @@ func (msg *AdoptedSSInsertRule) TypeID() int { type AdoptedSSDeleteRule struct { message SheetID uint64 - Index uint64 + Index uint64 } func (msg *AdoptedSSDeleteRule) Encode() []byte { @@ -2954,14 +2960,14 @@ func (msg 
*AdoptedSSDeleteRule) Encode() []byte { } func (msg *AdoptedSSDeleteRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSDeleteRule) Decode() Message { @@ -2975,7 +2981,7 @@ func (msg *AdoptedSSDeleteRule) TypeID() int { type AdoptedSSAddOwner struct { message SheetID uint64 - ID uint64 + ID uint64 } func (msg *AdoptedSSAddOwner) Encode() []byte { @@ -2988,14 +2994,14 @@ func (msg *AdoptedSSAddOwner) Encode() []byte { } func (msg *AdoptedSSAddOwner) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSAddOwner) Decode() Message { @@ -3009,7 +3015,7 @@ func (msg *AdoptedSSAddOwner) TypeID() int { type AdoptedSSRemoveOwner struct { message SheetID uint64 - ID uint64 + ID uint64 } func (msg *AdoptedSSRemoveOwner) Encode() []byte { @@ -3022,14 +3028,14 @@ func (msg *AdoptedSSRemoveOwner) Encode() []byte { } func (msg *AdoptedSSRemoveOwner) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *AdoptedSSRemoveOwner) Decode() Message { @@ -3043,7 +3049,7 @@ func (msg *AdoptedSSRemoveOwner) TypeID() int { type Zustand struct { message Mutation string - State string + State string } func (msg *Zustand) Encode() []byte { @@ -3056,14 +3062,14 @@ func (msg *Zustand) Encode() []byte { } func (msg *Zustand) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *Zustand) Decode() Message { @@ -3074,10 +3080,44 @@ func (msg *Zustand) TypeID() int { return 79 } +type SessionSearch struct { + message + Timestamp uint64 + Partition uint64 +} + +func (msg *SessionSearch) Encode() []byte { + buf := make([]byte, 21) + buf[0] = 127 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Partition, buf, p) + return buf[:p] +} + +func (msg *SessionSearch) EncodeWithIndex() []byte { + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + 
copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data +} + +func (msg *SessionSearch) Decode() Message { + return msg +} + +func (msg *SessionSearch) TypeID() int { + return 127 +} + type IOSBatchMeta struct { message - Timestamp uint64 - Length uint64 + Timestamp uint64 + Length uint64 FirstIndex uint64 } @@ -3092,14 +3132,14 @@ func (msg *IOSBatchMeta) Encode() []byte { } func (msg *IOSBatchMeta) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSBatchMeta) Decode() Message { @@ -3112,16 +3152,16 @@ func (msg *IOSBatchMeta) TypeID() int { type IOSSessionStart struct { message - Timestamp uint64 - ProjectID uint64 + Timestamp uint64 + ProjectID uint64 TrackerVersion string - RevID string - UserUUID string - UserOS string - UserOSVersion string - UserDevice string + RevID string + UserUUID string + UserOS string + UserOSVersion string + UserDevice string UserDeviceType string - UserCountry string + UserCountry string } func (msg *IOSSessionStart) Encode() []byte { @@ -3142,14 +3182,14 @@ func (msg *IOSSessionStart) Encode() []byte { } func (msg *IOSSessionStart) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSSessionStart) Decode() Message { @@ -3174,14 +3214,14 @@ func (msg *IOSSessionEnd) Encode() []byte { } func (msg *IOSSessionEnd) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSSessionEnd) Decode() Message { @@ -3195,9 +3235,9 @@ func (msg *IOSSessionEnd) TypeID() int { type IOSMetadata struct { message Timestamp uint64 - Length uint64 - Key string - Value string + Length uint64 + Key string + Value string } func (msg *IOSMetadata) Encode() []byte { @@ -3212,14 +3252,14 @@ func (msg *IOSMetadata) Encode() []byte { } func (msg *IOSMetadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSMetadata) Decode() Message { @@ -3233,9 +3273,9 
@@ func (msg *IOSMetadata) TypeID() int { type IOSCustomEvent struct { message Timestamp uint64 - Length uint64 - Name string - Payload string + Length uint64 + Name string + Payload string } func (msg *IOSCustomEvent) Encode() []byte { @@ -3250,14 +3290,14 @@ func (msg *IOSCustomEvent) Encode() []byte { } func (msg *IOSCustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSCustomEvent) Decode() Message { @@ -3271,8 +3311,8 @@ func (msg *IOSCustomEvent) TypeID() int { type IOSUserID struct { message Timestamp uint64 - Length uint64 - Value string + Length uint64 + Value string } func (msg *IOSUserID) Encode() []byte { @@ -3286,14 +3326,14 @@ func (msg *IOSUserID) Encode() []byte { } func (msg *IOSUserID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSUserID) Decode() Message { @@ -3307,8 +3347,8 @@ func (msg *IOSUserID) TypeID() int { type IOSUserAnonymousID struct { message Timestamp uint64 - Length uint64 - Value string + Length uint64 + Value string } func (msg *IOSUserAnonymousID) Encode() []byte { @@ -3322,14 +3362,14 @@ func (msg *IOSUserAnonymousID) Encode() []byte { } func (msg *IOSUserAnonymousID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSUserAnonymousID) Decode() Message { @@ -3343,11 +3383,11 @@ func (msg *IOSUserAnonymousID) TypeID() int { type IOSScreenChanges struct { message Timestamp uint64 - Length uint64 - X uint64 - Y uint64 - Width uint64 - Height uint64 + Length uint64 + X uint64 + Y uint64 + Width uint64 + Height uint64 } func (msg *IOSScreenChanges) Encode() []byte { @@ -3364,14 +3404,14 @@ func (msg *IOSScreenChanges) Encode() []byte { } func (msg *IOSScreenChanges) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSScreenChanges) Decode() Message { @@ -3384,10 +3424,10 @@ func (msg *IOSScreenChanges) TypeID() int { type IOSCrash struct { message - 
Timestamp uint64 - Length uint64 - Name string - Reason string + Timestamp uint64 + Length uint64 + Name string + Reason string Stacktrace string } @@ -3404,14 +3444,14 @@ func (msg *IOSCrash) Encode() []byte { } func (msg *IOSCrash) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSCrash) Decode() Message { @@ -3425,9 +3465,9 @@ func (msg *IOSCrash) TypeID() int { type IOSScreenEnter struct { message Timestamp uint64 - Length uint64 - Title string - ViewName string + Length uint64 + Title string + ViewName string } func (msg *IOSScreenEnter) Encode() []byte { @@ -3442,14 +3482,14 @@ func (msg *IOSScreenEnter) Encode() []byte { } func (msg *IOSScreenEnter) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSScreenEnter) Decode() Message { @@ -3463,9 +3503,9 @@ func (msg *IOSScreenEnter) TypeID() int { type IOSScreenLeave struct { message Timestamp uint64 - Length uint64 - Title string - ViewName string + Length uint64 + Title string + ViewName string } func (msg *IOSScreenLeave) Encode() []byte { @@ -3480,14 +3520,14 @@ func (msg *IOSScreenLeave) Encode() []byte { } func (msg *IOSScreenLeave) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSScreenLeave) Decode() Message { @@ -3501,10 +3541,10 @@ func (msg *IOSScreenLeave) TypeID() int { type IOSClickEvent struct { message Timestamp uint64 - Length uint64 - Label string - X uint64 - Y uint64 + Length uint64 + Label string + X uint64 + Y uint64 } func (msg *IOSClickEvent) Encode() []byte { @@ -3520,14 +3560,14 @@ func (msg *IOSClickEvent) Encode() []byte { } func (msg *IOSClickEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSClickEvent) Decode() Message { @@ -3540,11 +3580,11 @@ func (msg *IOSClickEvent) TypeID() int { type IOSInputEvent struct { message - Timestamp uint64 - Length uint64 - Value string + Timestamp uint64 + Length 
uint64 + Value string ValueMasked bool - Label string + Label string } func (msg *IOSInputEvent) Encode() []byte { @@ -3560,14 +3600,14 @@ func (msg *IOSInputEvent) Encode() []byte { } func (msg *IOSInputEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSInputEvent) Decode() Message { @@ -3581,9 +3621,9 @@ func (msg *IOSInputEvent) TypeID() int { type IOSPerformanceEvent struct { message Timestamp uint64 - Length uint64 - Name string - Value uint64 + Length uint64 + Name string + Value uint64 } func (msg *IOSPerformanceEvent) Encode() []byte { @@ -3598,14 +3638,14 @@ func (msg *IOSPerformanceEvent) Encode() []byte { } func (msg *IOSPerformanceEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSPerformanceEvent) Decode() Message { @@ -3619,9 +3659,9 @@ func (msg *IOSPerformanceEvent) TypeID() int { type IOSLog struct { message Timestamp uint64 - Length uint64 - Severity string - Content string + Length uint64 + Severity string + Content string } func (msg *IOSLog) Encode() []byte { @@ -3636,14 +3676,14 @@ func (msg *IOSLog) Encode() []byte { } func (msg *IOSLog) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSLog) Decode() Message { @@ -3657,8 +3697,8 @@ func (msg *IOSLog) TypeID() int { type IOSInternalError struct { message Timestamp uint64 - Length uint64 - Content string + Length uint64 + Content string } func (msg *IOSInternalError) Encode() []byte { @@ -3672,14 +3712,14 @@ func (msg *IOSInternalError) Encode() []byte { } func (msg *IOSInternalError) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSInternalError) Decode() Message { @@ -3693,14 +3733,14 @@ func (msg *IOSInternalError) TypeID() int { type IOSNetworkCall struct { message Timestamp uint64 - Length uint64 - Duration uint64 - Headers string - Body string - URL string - Success bool - Method string - Status uint64 
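// A minimal, self-contained sketch (an illustration, not part of this patch) of
// reversing the framing produced by the EncodeWithIndex methods above: for
// non-iOS message types the encoded body is prefixed with an 8-byte
// little-endian Meta().Index, while iOS types are written without that prefix.
// splitIndexed is a hypothetical helper name introduced only for this sketch.
package main

import (
	"encoding/binary"
	"errors"
	"fmt"
)

// splitIndexed strips the 8-byte little-endian index prefix and returns the
// index together with the raw encoded message that follows it.
func splitIndexed(frame []byte) (index uint64, encoded []byte, err error) {
	if len(frame) < 8 {
		return 0, nil, errors.New("frame shorter than the 8-byte index prefix")
	}
	return binary.LittleEndian.Uint64(frame[:8]), frame[8:], nil
}

func main() {
	// Build a frame the same way EncodeWithIndex does: index first, body after.
	body := []byte{0x01, 0x02, 0x03}
	frame := make([]byte, len(body)+8)
	binary.LittleEndian.PutUint64(frame[:8], 42)
	copy(frame[8:], body)

	index, encoded, err := splitIndexed(frame)
	fmt.Println(index, encoded, err) // 42 [1 2 3] <nil>
}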
+ Length uint64 + Duration uint64 + Headers string + Body string + URL string + Success bool + Method string + Status uint64 } func (msg *IOSNetworkCall) Encode() []byte { @@ -3720,14 +3760,14 @@ func (msg *IOSNetworkCall) Encode() []byte { } func (msg *IOSNetworkCall) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSNetworkCall) Decode() Message { @@ -3741,19 +3781,19 @@ func (msg *IOSNetworkCall) TypeID() int { type IOSPerformanceAggregated struct { message TimestampStart uint64 - TimestampEnd uint64 - MinFPS uint64 - AvgFPS uint64 - MaxFPS uint64 - MinCPU uint64 - AvgCPU uint64 - MaxCPU uint64 - MinMemory uint64 - AvgMemory uint64 - MaxMemory uint64 - MinBattery uint64 - AvgBattery uint64 - MaxBattery uint64 + TimestampEnd uint64 + MinFPS uint64 + AvgFPS uint64 + MaxFPS uint64 + MinCPU uint64 + AvgCPU uint64 + MaxCPU uint64 + MinMemory uint64 + AvgMemory uint64 + MaxMemory uint64 + MinBattery uint64 + AvgBattery uint64 + MaxBattery uint64 } func (msg *IOSPerformanceAggregated) Encode() []byte { @@ -3778,14 +3818,14 @@ func (msg *IOSPerformanceAggregated) Encode() []byte { } func (msg *IOSPerformanceAggregated) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSPerformanceAggregated) Decode() Message { @@ -3798,11 +3838,11 @@ func (msg *IOSPerformanceAggregated) TypeID() int { type IOSIssueEvent struct { message - Timestamp uint64 - Type string + Timestamp uint64 + Type string ContextString string - Context string - Payload string + Context string + Payload string } func (msg *IOSIssueEvent) Encode() []byte { @@ -3818,14 +3858,14 @@ func (msg *IOSIssueEvent) Encode() []byte { } func (msg *IOSIssueEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data + encoded := msg.Encode() + if IsIOSType(msg.TypeID()) { + return encoded + } + data := make([]byte, len(encoded)+8) + copy(data[8:], encoded[:]) + binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) + return data } func (msg *IOSIssueEvent) Decode() Message { @@ -3835,3 +3875,4 @@ func (msg *IOSIssueEvent) Decode() Message { func (msg *IOSIssueEvent) TypeID() int { return 111 } + diff --git a/backend/pkg/messages/raw.go b/backend/pkg/messages/raw.go index b9dba5de2..fa3bad5e3 100644 --- a/backend/pkg/messages/raw.go +++ b/backend/pkg/messages/raw.go @@ -67,3 +67,10 @@ func (m *RawMessage) TypeID() int { func (m *RawMessage) Meta() *message { return m.meta } + +func (m *RawMessage) SessionID() uint64 { + if m.meta != nil { + return m.meta.SessionID() + } + return 0 +} diff --git 
a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index 1b0f579af..cd9f0bdd1 100644 --- a/backend/pkg/messages/read-message.go +++ b/backend/pkg/messages/read-message.go @@ -6,1720 +6,1831 @@ import ( "io" ) + func DecodeBatchMeta(reader io.Reader) (Message, error) { - var err error = nil - msg := &BatchMeta{} - if msg.PageNo, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &BatchMeta{} + if msg.PageNo, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeBatchMetadata(reader io.Reader) (Message, error) { - var err error = nil - msg := &BatchMetadata{} - if msg.Version, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.PageNo, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Location, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &BatchMetadata{} + if msg.Version, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.PageNo, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Location, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodePartitionedMessage(reader io.Reader) (Message, error) { - var err error = nil - msg := &PartitionedMessage{} - if msg.PartNo, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.PartTotal, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PartitionedMessage{} + if msg.PartNo, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.PartTotal, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeTimestamp(reader io.Reader) (Message, error) { - var err error = nil - msg := &Timestamp{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Timestamp{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSessionStart(reader io.Reader) (Message, error) { - var err error = nil - msg := &SessionStart{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ProjectID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TrackerVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.RevID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserUUID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserAgent, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOS, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOSVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserBrowser, err = ReadString(reader); err != nil { - 
return nil, err - } - if msg.UserBrowserVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDevice, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceType, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceMemorySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UserDeviceHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UserCountry, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SessionStart{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ProjectID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TrackerVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.RevID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserUUID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserAgent, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOS, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOSVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserBrowser, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserBrowserVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDevice, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceType, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceMemorySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UserDeviceHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UserCountry, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSessionEnd(reader io.Reader) (Message, error) { - var err error = nil - msg := &SessionEnd{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SessionEnd{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetPageLocation(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetPageLocation{} - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Referrer, err = ReadString(reader); err != nil { - return nil, err - } - if msg.NavigationStart, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetPageLocation{} + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Referrer, err = ReadString(reader); err != nil { + return nil, err + } + if msg.NavigationStart, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetViewportSize(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetViewportSize{} - if msg.Width, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Height, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetViewportSize{} + if msg.Width, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Height, err = ReadUint(reader); err != nil { + return nil, err + } + return 
msg, err } + func DecodeSetViewportScroll(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetViewportScroll{} - if msg.X, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetViewportScroll{} + if msg.X, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCreateDocument(reader io.Reader) (Message, error) { - var err error = nil - msg := &CreateDocument{} - - return msg, err + var err error = nil + msg := &CreateDocument{} + + return msg, err } + func DecodeCreateElementNode(reader io.Reader) (Message, error) { - var err error = nil - msg := &CreateElementNode{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.index, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Tag, err = ReadString(reader); err != nil { - return nil, err - } - if msg.SVG, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CreateElementNode{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.index, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Tag, err = ReadString(reader); err != nil { + return nil, err + } + if msg.SVG, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCreateTextNode(reader io.Reader) (Message, error) { - var err error = nil - msg := &CreateTextNode{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CreateTextNode{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMoveNode(reader io.Reader) (Message, error) { - var err error = nil - msg := &MoveNode{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ParentID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &MoveNode{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ParentID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeRemoveNode(reader io.Reader) (Message, error) { - var err error = nil - msg := &RemoveNode{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &RemoveNode{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetNodeAttribute(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetNodeAttribute{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - 
if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetNodeAttribute{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeRemoveNodeAttribute(reader io.Reader) (Message, error) { - var err error = nil - msg := &RemoveNodeAttribute{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &RemoveNodeAttribute{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetNodeData(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetNodeData{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetNodeData{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetCSSData(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetCSSData{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetCSSData{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetNodeScroll(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetNodeScroll{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.X, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetNodeScroll{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.X, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetInputTarget(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetInputTarget{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetInputTarget{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetInputValue(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetInputValue{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Mask, err = ReadInt(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetInputValue{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if 
msg.Mask, err = ReadInt(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetInputChecked(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetInputChecked{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Checked, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetInputChecked{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Checked, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMouseMove(reader io.Reader) (Message, error) { - var err error = nil - msg := &MouseMove{} - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &MouseMove{} + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMouseClickDepricated(reader io.Reader) (Message, error) { - var err error = nil - msg := &MouseClickDepricated{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &MouseClickDepricated{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeConsoleLog(reader io.Reader) (Message, error) { - var err error = nil - msg := &ConsoleLog{} - if msg.Level, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ConsoleLog{} + if msg.Level, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodePageLoadTiming(reader io.Reader) (Message, error) { - var err error = nil - msg := &PageLoadTiming{} - if msg.RequestStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PageLoadTiming{} + if msg.RequestStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if 
msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstPaint, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodePageRenderTiming(reader io.Reader) (Message, error) { - var err error = nil - msg := &PageRenderTiming{} - if msg.SpeedIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PageRenderTiming{} + if msg.SpeedIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.VisuallyComplete, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimeToInteractive, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeJSException(reader io.Reader) (Message, error) { - var err error = nil - msg := &JSException{} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &JSException{} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIntegrationEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IntegrationEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Source, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IntegrationEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Source, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeRawCustomEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &RawCustomEvent{} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &RawCustomEvent{} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeUserID(reader io.Reader) (Message, error) { - var err error = nil - msg := &UserID{} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &UserID{} + if 
msg.ID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeUserAnonymousID(reader io.Reader) (Message, error) { - var err error = nil - msg := &UserAnonymousID{} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &UserAnonymousID{} + if msg.ID, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMetadata(reader io.Reader) (Message, error) { - var err error = nil - msg := &Metadata{} - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Metadata{} + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodePageEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &PageEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Referrer, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Loaded, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.RequestStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.SpeedIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PageEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Referrer, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Loaded, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.RequestStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ResponseEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.LoadEventEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstPaint, err = ReadUint(reader); err 
!= nil { + return nil, err + } + if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.SpeedIndex, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.VisuallyComplete, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimeToInteractive, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeInputEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &InputEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &InputEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ValueMasked, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeClickEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &ClickEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Selector, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ClickEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Selector, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeErrorEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &ErrorEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Source, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Message, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ErrorEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Source, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Message, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeResourceEvent(reader io.Reader) (Message, error) { 
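// A self-contained sketch of the field-by-field decode shape used by the
// Decode* functions in this file: read each field in declaration order and
// return on the first error. The pingMessage type and the readUint helper are
// hypothetical stand-ins for illustration only; the real ReadUint/ReadString
// in this package use their own wire encoding.
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

// pingMessage is a hypothetical two-field message used only for this sketch.
type pingMessage struct {
	Timestamp uint64
	SeqNo     uint64
}

// readUint is a stand-in reader: a fixed-width little-endian uint64, which is
// not the real wire format of this package.
func readUint(r io.Reader) (uint64, error) {
	var buf [8]byte
	if _, err := io.ReadFull(r, buf[:]); err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint64(buf[:]), nil
}

// decodePing mirrors the shape of the Decode* functions above.
func decodePing(r io.Reader) (*pingMessage, error) {
	var err error
	msg := &pingMessage{}
	if msg.Timestamp, err = readUint(r); err != nil {
		return nil, err
	}
	if msg.SeqNo, err = readUint(r); err != nil {
		return nil, err
	}
	return msg, nil
}

func main() {
	// Encode two fields with the stand-in format and decode them back.
	var b bytes.Buffer
	binary.Write(&b, binary.LittleEndian, uint64(1660000000000))
	binary.Write(&b, binary.LittleEndian, uint64(7))
	msg, err := decodePing(&b)
	fmt.Println(msg.Timestamp, msg.SeqNo, err) // 1660000000000 7 <nil>
}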
- var err error = nil - msg := &ResourceEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TTFB, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HeaderSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Success, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ResourceEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TTFB, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HeaderSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Success, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCustomEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &CustomEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CustomEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCSSInsertRule(reader io.Reader) (Message, error) { - var err error = nil - msg := &CSSInsertRule{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CSSInsertRule{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCSSDeleteRule(reader io.Reader) (Message, error) { - 
var err error = nil - msg := &CSSDeleteRule{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CSSDeleteRule{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeFetch(reader io.Reader) (Message, error) { - var err error = nil - msg := &Fetch{} - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Request, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Fetch{} + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Request, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeProfiler(reader io.Reader) (Message, error) { - var err error = nil - msg := &Profiler{} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Args, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Result, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Profiler{} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Args, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Result, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeOTable(reader io.Reader) (Message, error) { - var err error = nil - msg := &OTable{} - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &OTable{} + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeStateAction(reader io.Reader) (Message, error) { - var err error = nil - msg := &StateAction{} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &StateAction{} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeStateActionEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &StateActionEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); 
err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &StateActionEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeRedux(reader io.Reader) (Message, error) { - var err error = nil - msg := &Redux{} - if msg.Action, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Redux{} + if msg.Action, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeVuex(reader io.Reader) (Message, error) { - var err error = nil - msg := &Vuex{} - if msg.Mutation, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Vuex{} + if msg.Mutation, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMobX(reader io.Reader) (Message, error) { - var err error = nil - msg := &MobX{} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &MobX{} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeNgRx(reader io.Reader) (Message, error) { - var err error = nil - msg := &NgRx{} - if msg.Action, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &NgRx{} + if msg.Action, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeGraphQL(reader io.Reader) (Message, error) { - var err error = nil - msg := &GraphQL{} - if msg.OperationKind, err = ReadString(reader); err != nil { - return nil, err - } - if msg.OperationName, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Variables, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &GraphQL{} + if msg.OperationKind, err = ReadString(reader); err != nil { + return nil, err + } + if msg.OperationName, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Variables, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func 
DecodePerformanceTrack(reader io.Reader) (Message, error) { - var err error = nil - msg := &PerformanceTrack{} - if msg.Frames, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.Ticks, err = ReadInt(reader); err != nil { - return nil, err - } - if msg.TotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.UsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PerformanceTrack{} + if msg.Frames, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.Ticks, err = ReadInt(reader); err != nil { + return nil, err + } + if msg.TotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.UsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeGraphQLEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &GraphQLEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.OperationKind, err = ReadString(reader); err != nil { - return nil, err - } - if msg.OperationName, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Variables, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &GraphQLEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.OperationKind, err = ReadString(reader); err != nil { + return nil, err + } + if msg.OperationName, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Variables, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeFetchEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &FetchEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Request, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &FetchEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Request, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Response, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeDOMDrop(reader io.Reader) (Message, error) { - var err error = nil - msg := &DOMDrop{} - if 
msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &DOMDrop{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeResourceTiming(reader io.Reader) (Message, error) { - var err error = nil - msg := &ResourceTiming{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TTFB, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HeaderSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Initiator, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ResourceTiming{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TTFB, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HeaderSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Initiator, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeConnectionInformation(reader io.Reader) (Message, error) { - var err error = nil - msg := &ConnectionInformation{} - if msg.Downlink, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &ConnectionInformation{} + if msg.Downlink, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetPageVisibility(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetPageVisibility{} - if msg.hidden, err = ReadBoolean(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetPageVisibility{} + if msg.hidden, err = ReadBoolean(reader); err != nil { + return nil, err + } + return msg, err } + func DecodePerformanceTrackAggr(reader io.Reader) (Message, error) { - var err error = nil - msg := &PerformanceTrackAggr{} - if msg.TimestampStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if 
msg.MaxTotalJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinUsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgUsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxUsedJSHeapSize, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &PerformanceTrackAggr{} + if msg.TimestampStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimestampEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxTotalJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxUsedJSHeapSize, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeLongTask(reader io.Reader) (Message, error) { - var err error = nil - msg := &LongTask{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ContainerType, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ContainerSrc, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContainerId, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContainerName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &LongTask{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ContainerType, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ContainerSrc, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContainerId, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContainerName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetNodeAttributeURLBased(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetNodeAttributeURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetNodeAttributeURLBased{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return 
nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeSetCSSDataURLBased(reader io.Reader) (Message, error) { - var err error = nil - msg := &SetCSSDataURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Data, err = ReadString(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &SetCSSDataURLBased{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Data, err = ReadString(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIssueEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IssueEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContextString, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IssueEvent{} + if msg.MessageID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContextString, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeTechnicalInfo(reader io.Reader) (Message, error) { - var err error = nil - msg := &TechnicalInfo{} - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &TechnicalInfo{} + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCustomIssue(reader io.Reader) (Message, error) { - var err error = nil - msg := &CustomIssue{} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CustomIssue{} + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAssetCache(reader io.Reader) (Message, error) { - var err error = nil - msg := &AssetCache{} - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AssetCache{} + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCSSInsertRuleURLBased(reader io.Reader) (Message, error) { - var err error = nil - msg := &CSSInsertRuleURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if 
msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CSSInsertRuleURLBased{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeMouseClick(reader io.Reader) (Message, error) { - var err error = nil - msg := &MouseClick{} - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Selector, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &MouseClick{} + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.HesitationTime, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Selector, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeCreateIFrameDocument(reader io.Reader) (Message, error) { - var err error = nil - msg := &CreateIFrameDocument{} - if msg.FrameID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &CreateIFrameDocument{} + if msg.FrameID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSReplaceURLBased(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSReplaceURLBased{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Text, err = ReadString(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSReplaceURLBased{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Text, err = ReadString(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSReplace(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSReplace{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Text, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSReplace{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Text, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSInsertRuleURLBased(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSInsertRuleURLBased{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.BaseURL, err = 
ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSInsertRuleURLBased{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.BaseURL, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSInsertRule(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSInsertRule{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Rule, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSInsertRule{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Rule, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSDeleteRule(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSDeleteRule{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Index, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSDeleteRule{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Index, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSAddOwner(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSAddOwner{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSAddOwner{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeAdoptedSSRemoveOwner(reader io.Reader) (Message, error) { - var err error = nil - msg := &AdoptedSSRemoveOwner{} - if msg.SheetID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ID, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &AdoptedSSRemoveOwner{} + if msg.SheetID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ID, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeZustand(reader io.Reader) (Message, error) { - var err error = nil - msg := &Zustand{} - if msg.Mutation, err = ReadString(reader); err != nil { - return nil, err - } - if msg.State, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &Zustand{} + if msg.Mutation, err = ReadString(reader); err != nil { + return nil, err + } + if msg.State, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + +func DecodeSessionSearch(reader io.Reader) (Message, error) { + var err error = nil + msg := &SessionSearch{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Partition, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err +} + + func DecodeIOSBatchMeta(reader io.Reader) (Message, error) { - var err error = nil - 
msg := &IOSBatchMeta{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstIndex, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSBatchMeta{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.FirstIndex, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSSessionStart(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSSessionStart{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ProjectID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TrackerVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.RevID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserUUID, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOS, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserOSVersion, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDevice, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserDeviceType, err = ReadString(reader); err != nil { - return nil, err - } - if msg.UserCountry, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSSessionStart{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.ProjectID, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TrackerVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.RevID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserUUID, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOS, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserOSVersion, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDevice, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserDeviceType, err = ReadString(reader); err != nil { + return nil, err + } + if msg.UserCountry, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSSessionEnd(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSSessionEnd{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSSessionEnd{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSMetadata(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSMetadata{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSMetadata{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Key, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } 
+ return msg, err } + func DecodeIOSCustomEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSCustomEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSCustomEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSUserID(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSUserID{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSUserID{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSUserAnonymousID(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSUserAnonymousID{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSUserAnonymousID{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSScreenChanges(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSScreenChanges{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Width, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Height, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSScreenChanges{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Width, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Height, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSCrash(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSCrash{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = 
ReadString(reader); err != nil { - return nil, err - } - if msg.Reason, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Stacktrace, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSCrash{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Reason, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Stacktrace, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSScreenEnter(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSScreenEnter{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Title, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ViewName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSScreenEnter{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Title, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ViewName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSScreenLeave(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSScreenLeave{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Title, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ViewName, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSScreenLeave{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Title, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ViewName, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSClickEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSClickEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.X, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Y, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSClickEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + if msg.X, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Y, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSInputEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSInputEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if 
msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSInputEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ValueMasked, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Label, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSPerformanceEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSPerformanceEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSPerformanceEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Name, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Value, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSLog(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSLog{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Severity, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Content, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSLog{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Severity, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Content, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSInternalError(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSInternalError{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Content, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSInternalError{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Content, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSNetworkCall(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSNetworkCall{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Length, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Headers, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Body, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = 
ReadString(reader); err != nil { - return nil, err - } - if msg.Success, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSNetworkCall{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Length, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Duration, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Headers, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Body, err = ReadString(reader); err != nil { + return nil, err + } + if msg.URL, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Success, err = ReadBoolean(reader); err != nil { + return nil, err + } + if msg.Method, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Status, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSPerformanceAggregated(reader io.Reader) (Message, error) { - var err error = nil - msg := &IOSPerformanceAggregated{} - if msg.TimestampStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxFPS, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxCPU, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinMemory, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgMemory, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxMemory, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MinBattery, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.AvgBattery, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.MaxBattery, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSPerformanceAggregated{} + if msg.TimestampStart, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.TimestampEnd, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxFPS, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxCPU, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxMemory, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MinBattery, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.AvgBattery, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.MaxBattery, err = ReadUint(reader); err != nil { + return nil, err + } + return msg, err } + func DecodeIOSIssueEvent(reader io.Reader) 
(Message, error) { - var err error = nil - msg := &IOSIssueEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ContextString, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Context, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err + var err error = nil + msg := &IOSIssueEvent{} + if msg.Timestamp, err = ReadUint(reader); err != nil { + return nil, err + } + if msg.Type, err = ReadString(reader); err != nil { + return nil, err + } + if msg.ContextString, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Context, err = ReadString(reader); err != nil { + return nil, err + } + if msg.Payload, err = ReadString(reader); err != nil { + return nil, err + } + return msg, err } + + func ReadMessage(t uint64, reader io.Reader) (Message, error) { switch t { @@ -1954,6 +2065,9 @@ func ReadMessage(t uint64, reader io.Reader) (Message, error) { case 79: return DecodeZustand(reader) + case 127: + return DecodeSessionSearch(reader) + case 107: return DecodeIOSBatchMeta(reader) diff --git a/backend/pkg/queue/import.go b/backend/pkg/queue/import.go index d5daa1dd5..978798ce2 100644 --- a/backend/pkg/queue/import.go +++ b/backend/pkg/queue/import.go @@ -1,12 +1,13 @@ package queue import ( + "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue/types" "openreplay/backend/pkg/redisstream" ) -func NewConsumer(group string, topics []string, handler types.MessageHandler, _ bool, _ int) types.Consumer { - return redisstream.NewConsumer(group, topics, handler) +func NewConsumer(group string, topics []string, iterator messages.MessageIterator, _ bool, _ int) types.Consumer { + return redisstream.NewConsumer(group, topics, iterator) } func NewProducer(_ int, _ bool) types.Producer { diff --git a/backend/pkg/queue/messages.go b/backend/pkg/queue/messages.go deleted file mode 100644 index f52813492..000000000 --- a/backend/pkg/queue/messages.go +++ /dev/null @@ -1,12 +0,0 @@ -package queue - -import ( - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/queue/types" -) - -func NewMessageConsumer(group string, topics []string, handler types.RawMessageHandler, autoCommit bool, messageSizeLimit int) types.Consumer { - return NewConsumer(group, topics, func(sessionID uint64, value []byte, meta *types.Meta) { - handler(sessionID, messages.NewIterator(value), meta) - }, autoCommit, messageSizeLimit) -} diff --git a/backend/pkg/queue/types/types.go b/backend/pkg/queue/types/types.go index 0f196c608..48408ce10 100644 --- a/backend/pkg/queue/types/types.go +++ b/backend/pkg/queue/types/types.go @@ -1,30 +1,17 @@ package types -import ( - "openreplay/backend/pkg/messages" -) - +// Consumer reads batches of session data from queue (redis or kafka) type Consumer interface { ConsumeNext() error - Commit() error CommitBack(gap int64) error + Commit() error Close() - HasFirstPartition() bool } +// Producer sends batches of session data to queue (redis or kafka) type Producer interface { Produce(topic string, key uint64, value []byte) error ProduceToPartition(topic string, partition, key uint64, value []byte) error - Close(timeout int) Flush(timeout int) + Close(timeout int) } - -type Meta struct { - ID uint64 - Topic string - Timestamp int64 -} - -type MessageHandler func(uint64, []byte, *Meta) -type DecodedMessageHandler func(uint64, 
messages.Message, *Meta) -type RawMessageHandler func(uint64, messages.Iterator, *Meta) diff --git a/backend/pkg/redisstream/consumer.go b/backend/pkg/redisstream/consumer.go index bae70120d..fed9a7511 100644 --- a/backend/pkg/redisstream/consumer.go +++ b/backend/pkg/redisstream/consumer.go @@ -3,6 +3,7 @@ package redisstream import ( "log" "net" + "openreplay/backend/pkg/messages" "sort" "strconv" "strings" @@ -10,8 +11,6 @@ import ( _redis "github.com/go-redis/redis" "github.com/pkg/errors" - - "openreplay/backend/pkg/queue/types" ) type idsInfo struct { @@ -21,16 +20,16 @@ type idsInfo struct { type streamPendingIDsMap map[string]*idsInfo type Consumer struct { - redis *_redis.Client - streams []string - group string - messageHandler types.MessageHandler - idsPending streamPendingIDsMap - lastTs int64 - autoCommit bool + redis *_redis.Client + streams []string + group string + messageIterator messages.MessageIterator + idsPending streamPendingIDsMap + lastTs int64 + autoCommit bool } -func NewConsumer(group string, streams []string, messageHandler types.MessageHandler) *Consumer { +func NewConsumer(group string, streams []string, messageIterator messages.MessageIterator) *Consumer { redis := getRedisClient() for _, stream := range streams { err := redis.XGroupCreateMkStream(stream, group, "0").Err() @@ -52,12 +51,12 @@ func NewConsumer(group string, streams []string, messageHandler types.MessageHan } return &Consumer{ - redis: redis, - messageHandler: messageHandler, - streams: streams, - group: group, - autoCommit: true, - idsPending: idsPending, + redis: redis, + messageIterator: messageIterator, + streams: streams, + group: group, + autoCommit: true, + idsPending: idsPending, } } @@ -102,11 +101,8 @@ func (c *Consumer) ConsumeNext() error { if idx > 0x1FFF { return errors.New("Too many messages per ms in redis") } - c.messageHandler(sessionID, []byte(valueString), &types.Meta{ - Topic: r.Stream, - Timestamp: int64(ts), - ID: ts<<13 | (idx & 0x1FFF), // Max: 4096 messages/ms for 69 years - }) + bID := ts<<13 | (idx & 0x1FFF) // Max: 4096 messages/ms for 69 years + c.messageIterator.Iterate([]byte(valueString), messages.NewBatchInfo(sessionID, r.Stream, bID, int64(ts))) if c.autoCommit { if err = c.redis.XAck(r.Stream, c.group, m.ID).Err(); err != nil { return errors.Wrapf(err, "Acknoledgment error for messageID %v", m.ID) @@ -161,7 +157,3 @@ func (c *Consumer) CommitBack(gap int64) error { func (c *Consumer) Close() { // noop } - -func (c *Consumer) HasFirstPartition() bool { - return false -} diff --git a/backend/pkg/sessions/builder.go b/backend/pkg/sessions/builder.go index c9cb0b6dd..d21fd890a 100644 --- a/backend/pkg/sessions/builder.go +++ b/backend/pkg/sessions/builder.go @@ -66,6 +66,7 @@ func (b *builder) handleMessage(message Message, messageID uint64) { b.lastSystemTime = time.Now() for _, p := range b.processors { if rm := p.Handle(message, messageID, b.timestamp); rm != nil { + rm.Meta().SetMeta(message.Meta()) b.readyMsgs = append(b.readyMsgs, rm) } } diff --git a/backend/pkg/sessions/builderMap.go b/backend/pkg/sessions/builderMap.go index f26993c13..bdf8e8686 100644 --- a/backend/pkg/sessions/builderMap.go +++ b/backend/pkg/sessions/builderMap.go @@ -30,7 +30,9 @@ func (m *builderMap) GetBuilder(sessionID uint64) *builder { return b } -func (m *builderMap) HandleMessage(sessionID uint64, msg Message, messageID uint64) { +func (m *builderMap) HandleMessage(msg Message) { + sessionID := msg.SessionID() + messageID := msg.Meta().Index b := m.GetBuilder(sessionID) 
b.handleMessage(msg, messageID) } @@ -39,6 +41,7 @@ func (m *builderMap) iterateSessionReadyMessages(sessionID uint64, b *builder, i if b.ended || b.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(time.Now()) { for _, p := range b.processors { if rm := p.Build(); rm != nil { + rm.Meta().SetSessionID(sessionID) b.readyMsgs = append(b.readyMsgs, rm) } } diff --git a/ee/backend/internal/db/datasaver/messages.go b/ee/backend/internal/db/datasaver/messages.go index 3187a0c91..ac71a3e91 100644 --- a/ee/backend/internal/db/datasaver/messages.go +++ b/ee/backend/internal/db/datasaver/messages.go @@ -6,7 +6,8 @@ import ( "openreplay/backend/pkg/messages" ) -func (mi *Saver) InsertMessage(sessionID uint64, msg messages.Message) error { +func (mi *Saver) InsertMessage(msg messages.Message) error { + sessionID := msg.SessionID() switch m := msg.(type) { // Common case *messages.Metadata: diff --git a/ee/backend/internal/db/datasaver/stats.go b/ee/backend/internal/db/datasaver/stats.go index e018a2575..9cdeb6050 100644 --- a/ee/backend/internal/db/datasaver/stats.go +++ b/ee/backend/internal/db/datasaver/stats.go @@ -38,6 +38,6 @@ func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error return nil } -func (si *Saver) CommitStats(optimize bool) error { +func (si *Saver) CommitStats() error { return si.ch.Commit() } diff --git a/ee/backend/pkg/failover/failover.go b/ee/backend/pkg/failover/failover.go index d69a2c86a..5d3ef534a 100644 --- a/ee/backend/pkg/failover/failover.go +++ b/ee/backend/pkg/failover/failover.go @@ -55,19 +55,16 @@ func NewSessionFinder(cfg *config.Config, stg *storage.Storage) (SessionFinder, done: make(chan struct{}, 1), } finder.producer = queue.NewProducer(cfg.MessageSizeLimit, false) - finder.consumer = queue.NewMessageConsumer( + finder.consumer = queue.NewConsumer( cfg.GroupFailover, []string{ cfg.TopicFailover, }, - func(sessionID uint64, iter messages.Iterator, meta *types.Meta) { - for iter.Next() { - if iter.Type() == 127 { - m := iter.Message().Decode().(*messages.SessionSearch) - finder.findSession(sessionID, m.Timestamp, m.Partition) - } - } - }, + messages.NewMessageIterator( + func(msg messages.Message) { + m := msg.(*messages.SessionSearch) + finder.findSession(m.SessionID(), m.Timestamp, m.Partition) + }, []int{messages.MsgSessionSearch}, true), true, cfg.MessageSizeLimit, ) @@ -128,7 +125,7 @@ func (s *sessionFinderImpl) nextPartition(partition uint64) uint64 { // Create sessionSearch message and send it to queue func (s *sessionFinderImpl) sendSearchMessage(sessionID, timestamp, partition uint64) { msg := &messages.SessionSearch{Timestamp: timestamp, Partition: partition} - if err := s.producer.ProduceToPartition(s.topicName, partition, sessionID, messages.Encode(msg)); err != nil { + if err := s.producer.ProduceToPartition(s.topicName, partition, sessionID, msg.Encode()); err != nil { log.Printf("can't send SessionSearch to failover topic: %s; sessID: %d", err, sessionID) } } diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go index ca37917f1..f86912938 100644 --- a/ee/backend/pkg/kafka/consumer.go +++ b/ee/backend/pkg/kafka/consumer.go @@ -2,6 +2,7 @@ package kafka import ( "log" + "openreplay/backend/pkg/messages" "os" "time" @@ -9,16 +10,15 @@ import ( "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka" "openreplay/backend/pkg/env" - "openreplay/backend/pkg/queue/types" ) type Message = kafka.Message type Consumer struct { - c *kafka.Consumer - messageHandler types.MessageHandler - commitTicker *time.Ticker 
- pollTimeout uint + c *kafka.Consumer + messageIterator messages.MessageIterator + commitTicker *time.Ticker + pollTimeout uint lastReceivedPrtTs map[int32]int64 } @@ -26,7 +26,7 @@ type Consumer struct { func NewConsumer( group string, topics []string, - messageHandler types.MessageHandler, + messageIterator messages.MessageIterator, autoCommit bool, messageSizeLimit int, ) *Consumer { @@ -70,7 +70,7 @@ func NewConsumer( return &Consumer{ c: c, - messageHandler: messageHandler, + messageIterator: messageIterator, commitTicker: commitTicker, pollTimeout: 200, lastReceivedPrtTs: make(map[int32]int64), @@ -171,11 +171,13 @@ func (consumer *Consumer) ConsumeNext() error { return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error") } ts := e.Timestamp.UnixMilli() - consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{ - Topic: *(e.TopicPartition.Topic), - ID: uint64(e.TopicPartition.Offset), - Timestamp: ts, - }) + consumer.messageIterator.Iterate( + e.Value, + messages.NewBatchInfo( + decodeKey(e.Key), + *(e.TopicPartition.Topic), + uint64(e.TopicPartition.Offset), + ts)) consumer.lastReceivedPrtTs[e.TopicPartition.Partition] = ts case kafka.Error: if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded { @@ -194,16 +196,3 @@ func (consumer *Consumer) Close() { log.Printf("Kafka consumer close error: %v", err) } } - -func (consumer *Consumer) HasFirstPartition() bool { - assigned, err := consumer.c.Assignment() - if err != nil { - return false - } - for _, p := range assigned { - if p.Partition == 1 { - return true - } - } - return false -} diff --git a/ee/backend/pkg/queue/import.go b/ee/backend/pkg/queue/import.go index a0c6a02f1..2aaf22f94 100644 --- a/ee/backend/pkg/queue/import.go +++ b/ee/backend/pkg/queue/import.go @@ -3,12 +3,13 @@ package queue import ( "openreplay/backend/pkg/kafka" "openreplay/backend/pkg/license" + "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue/types" ) -func NewConsumer(group string, topics []string, handler types.MessageHandler, autoCommit bool, messageSizeLimit int) types.Consumer { +func NewConsumer(group string, topics []string, iterator messages.MessageIterator, autoCommit bool, messageSizeLimit int) types.Consumer { license.CheckLicense() - return kafka.NewConsumer(group, topics, handler, autoCommit, messageSizeLimit) + return kafka.NewConsumer(group, topics, iterator, autoCommit, messageSizeLimit) } func NewProducer(messageSizeLimit int, useBatch bool) types.Producer { diff --git a/mobs/messages.rb b/mobs/messages.rb index 6d21c2e05..872e65d58 100644 --- a/mobs/messages.rb +++ b/mobs/messages.rb @@ -469,4 +469,9 @@ message 79, 'Zustand' do string 'State' end +message 127, 'SessionSearch' do + uint 'Timestamp' + uint 'Partition' +end + # 80 -- 90 reserved \ No newline at end of file diff --git a/mobs/run.rb b/mobs/run.rb index 67a9b4eea..31adfcba1 100644 --- a/mobs/run.rb +++ b/mobs/run.rb @@ -113,7 +113,7 @@ $ids = [] $messages = [] def message(id, name, opts = {}, &block) raise "id duplicated #{name}" if $ids.include? 
id - raise "id is too big #{name}" if id > 120 + raise "id is too big #{name}" if id > 127 $ids << id opts[:id] = id opts[:name] = name From f23d54a9cdb2b605c96d1820a6415d2d32ec2a68 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 28 Sep 2022 18:08:58 +0530 Subject: [PATCH 055/592] change(ui) - assist agentToken --- frontend/app/player/MessageDistributor/MessageDistributor.ts | 2 +- .../app/player/MessageDistributor/managers/AssistManager.ts | 5 ++++- frontend/app/types/session/session.ts | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index c5ac4f6b5..b342fd4bf 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -109,7 +109,7 @@ export default class MessageDistributor extends StatedScreen { if (live) { initListsDepr({}) - this.assistManager.connect(); + this.assistManager.connect(this.session.agentToken); } else { this.activityManager = new ActivityManager(this.session.duration.milliseconds); /* == REFACTOR_ME == */ diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts index 5a4945fbe..d979d52f8 100644 --- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts +++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts @@ -119,7 +119,7 @@ export default class AssistManager { } private socket: Socket | null = null - connect() { + connect(agentToken: string) { const jmr = new JSONRawMessageReader() const reader = new MStreamReader(jmr) let waitingForMessages = true @@ -146,6 +146,9 @@ export default class AssistManager { // @ts-ignore WTF, socket.io ??? 
const socket: Socket = this.socket = io(urlObject.origin, { path: '/ws-assist/socket', + auth: { + token: agentToken + }, query: { peerId: this.peerID, identity: "agent", diff --git a/frontend/app/types/session/session.ts b/frontend/app/types/session/session.ts index d66670963..9beee5cd0 100644 --- a/frontend/app/types/session/session.ts +++ b/frontend/app/types/session/session.ts @@ -81,7 +81,8 @@ export default Record({ revId: '', userSessionsCount: 0, agentIds: [], - isCallActive: false + isCallActive: false, + agentToken: '' }, { fromJS:({ startTs=0, From fa9c96d1394df6d1911bb3ce81a7fda46e99ef40 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:17:11 +0200 Subject: [PATCH 056/592] feat(chalice): changed assist agent secret --- api/chalicelib/core/assist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 3cd3cec25..b4fc3a9f8 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -93,7 +93,7 @@ def __get_agent_token(project_id, project_key, session_id): "iss": config("JWT_ISSUER"), "aud": f"openreplay:agent" }, - key=config("jwt_secret"), + key=config("ASSIST_JWT_SECRET"), algorithm=config("jwt_algorithm") ) From 6cb1f82d7d6b1a935a62b8aca5c3a7040d69ef07 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:34:05 +0200 Subject: [PATCH 057/592] feat(chalice): changed notes --- api/chalicelib/core/sessions_notes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 916af221a..6c66ebcf5 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -20,6 +20,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, "session_id": session_id}) + cur.execute(query=query) rows = cur.fetchall() rows = helper.list_to_camel_case(rows) for row in rows: From f2b10bceb4f3b82ec236cf96e3512f1e84c2a237 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 15:44:23 +0200 Subject: [PATCH 058/592] feat(chalice): changed notes --- api/chalicelib/core/sessions_notes.py | 4 +- ee/api/.gitignore | 1 - ee/api/chalicelib/core/sessions_notes.py | 106 +++++++++++++++++++++++ ee/api/clean.sh | 1 - 4 files changed, 108 insertions(+), 4 deletions(-) create mode 100644 ee/api/chalicelib/core/sessions_notes.py diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 6c66ebcf5..c1eec5dfc 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -15,7 +15,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): AND sessions_notes.deleted_at IS NULL AND sessions_notes.session_id = %(session_id)s AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + OR sessions_notes.is_public) ORDER BY created_at DESC;""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, "session_id": session_id}) @@ -36,7 +36,7 @@ def get_all_notes(tenant_id, project_id, user_id): WHERE sessions_notes.project_id = %(project_id)s AND sessions_notes.deleted_at IS NULL AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + OR sessions_notes.is_public) ORDER BY created_at DESC;""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) 
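Context for the assist changes above: __get_agent_token now signs the agent JWT with ASSIST_JWT_SECRET (issuer taken from JWT_ISSUER, audience "openreplay:agent"), and the player forwards session.agentToken in the socket.io auth payload when AssistManager.connect() is called. The sketch below is illustrative only — a minimal PyJWT-based validation of such a token, assuming python-decouple's config for settings access; the function name verify_agent_token is made up for this example, and in OpenReplay the actual check is performed by the Node assist server (see the check() middleware in utilities/utils/assistHelper.js further down in this series).

import jwt                      # PyJWT — assumed; it matches the key/algorithm parameters used when signing in assist.py
from decouple import config     # assumed source of config(), mirroring its use in the chalice API

def verify_agent_token(token: str) -> dict:
    # Raises a jwt.InvalidTokenError subclass (bad signature, wrong audience/issuer, expiry)
    # when the token was not minted by __get_agent_token with the same secret and claims.
    return jwt.decode(
        token,
        key=config("ASSIST_JWT_SECRET"),
        algorithms=[config("jwt_algorithm")],
        audience="openreplay:agent",
        issuer=config("JWT_ISSUER"),
    )

A failed decode corresponds to the cases where the Node check() middleware shown later rejects the handshake with "Authentication error" or "Authorization error" for missing or mismatched credentials.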
diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 924060617..811b00301 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -213,7 +213,6 @@ Pipfile /chalicelib/core/sessions_assignments.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py -/chalicelib/core/sessions_notes.py #exp /chalicelib/core/significance.py /chalicelib/core/slack.py /chalicelib/core/socket_ios.py diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py new file mode 100644 index 000000000..6c66ebcf5 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -0,0 +1,106 @@ +import json + +import schemas +from chalicelib.core import users +from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils.TimeUTC import TimeUTC + + +def get_session_notes(tenant_id, project_id, session_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND sessions_notes.session_id = %(session_id)s + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, + "tenant_id": tenant_id, "session_id": session_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def get_all_notes(tenant_id, project_id, user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""SELECT sessions_notes.* + FROM sessions_notes + INNER JOIN users USING (user_id) + WHERE sessions_notes.project_id = %(project_id)s + AND sessions_notes.deleted_at IS NULL + AND (sessions_notes.user_id = %(user_id)s + OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + ORDER BY created_at DESC;""", + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + + cur.execute(query=query) + rows = cur.fetchall() + rows = helper.list_to_camel_case(rows) + for row in rows: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return rows + + +def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tags, session_id, project_id, timestamp, is_public) + VALUES (%(message)s, %(user_id)s, %(tags)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) + RETURNING *;""", + {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) + cur.execute(query) + result = cur.fetchone() + return helper.dict_to_camel_case(result) + + +def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): + sub_query = [] + if data.message is not None: + sub_query.append("message = %(message)s") + if data.tags is not None: + sub_query.append("tags = %(tags)s") + if data.is_public is not None: + sub_query.append("is_public = %(is_public)s") + if data.timestamp is not None: + sub_query.append("timestamp = %(timestamp)s") + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify(f"""\ + UPDATE public.sessions_notes + SET + {" ,".join(sub_query)} + WHERE + project_id = %(project_id)s + AND user_id = %(user_id)s + AND note_id = %(note_id)s + AND deleted_at ISNULL + 
RETURNING *;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()}) + ) + row = helper.dict_to_camel_case(cur.fetchone()) + if row: + row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) + return row + + +def delete(tenant_id, user_id, project_id, note_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.sessions_notes + SET + deleted_at = timezone('utc'::text, now()) + WHERE + note_id = %(note_id)s + AND project_id = %(project_id)s\ + AND user_id = %(user_id)s + AND deleted_at ISNULL;""", + {"project_id": project_id, "user_id": user_id, "note_id": note_id}) + ) + return {"data": {"state": "success"}} diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 53607cb25..ce58fe45e 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -35,7 +35,6 @@ rm -rf ./chalicelib/core/mobile.py rm -rf ./chalicelib/core/sessions_assignments.py rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py -rm -rf ./chalicelib/core/sessions_notes.py #exp rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/slack.py rm -rf ./chalicelib/core/socket_ios.py From a7487cd371f9f823bce16ea41d68ccdb4723ca44 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Wed, 28 Sep 2022 15:47:30 +0200 Subject: [PATCH 059/592] feat(backend): removed debug log from configurator module --- backend/internal/config/configurator/configurator.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/backend/internal/config/configurator/configurator.go b/backend/internal/config/configurator/configurator.go index 3ffa8f7d7..5335d4a91 100644 --- a/backend/internal/config/configurator/configurator.go +++ b/backend/internal/config/configurator/configurator.go @@ -17,9 +17,6 @@ import ( ) func readFile(path string) (map[string]string, error) { - if path == "" { - return nil, fmt.Errorf("file path is empty") - } file, err := os.Open(path) if err != nil { return nil, fmt.Errorf("can't open file: %s", err) @@ -40,6 +37,10 @@ func readFile(path string) (map[string]string, error) { } func parseFile(a interface{}, path string) { + // Skip parsing process without logs if we don't have path to config file + if path == "" { + return + } envs, err := readFile(path) if err != nil { log.Printf("can't parse config file: %s", err) From eda3bb3dcbcf112f143522b9fd32ff66cd266744 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 17:14:22 +0200 Subject: [PATCH 060/592] feat(assist): enhanced code --- utilities/utils/assistHelper.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utilities/utils/assistHelper.js b/utilities/utils/assistHelper.js index 006223d25..ece1ea0f3 100644 --- a/utilities/utils/assistHelper.js +++ b/utilities/utils/assistHelper.js @@ -134,7 +134,7 @@ function check(socket, next) { debug && console.error(`projectKey:${projectKey}, sessionId:${sessionId}`); return next(new Error('Authentication error')); } - if (projectKey !== decoded.projectKey || sessionId !== decoded.sessionId) { + if (String(projectKey) !== String(decoded.projectKey) || String(sessionId) !== String(decoded.sessionId)) { debug && console.error(`Trying to access projectKey:${projectKey} instead of ${decoded.projectKey}\nor`); debug && console.error(`Trying to access sessionId:${sessionId} instead of ${decoded.sessionId}`); return next(new Error('Authorization error')); From 43434f892a1dfe32f2e51fe9ca12492c87ebc0cb Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Wed, 28 
Sep 2022 17:29:09 +0200 Subject: [PATCH 061/592] feat(backend): fixes for graceful shutdown in services --- backend/cmd/assets/main.go | 1 + backend/cmd/db/main.go | 43 +++++++++++++++++--------------- backend/cmd/ender/main.go | 2 +- backend/cmd/heuristics/main.go | 2 +- backend/cmd/integrations/main.go | 1 - backend/cmd/sink/main.go | 5 +++- 6 files changed, 30 insertions(+), 24 deletions(-) diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index 6f428034c..04dc5e634 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -68,6 +68,7 @@ func main() { select { case sig := <-sigchan: log.Printf("Caught signal %v: terminating\n", sig) + // TODO: wait assets workers here msgConsumer.Close() os.Exit(0) case err := <-cacher.Errors: diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index 89fb7ce33..00714c5cb 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -67,7 +67,7 @@ func main() { // Handler logic msgHandler := func(msg messages.Message) { - statsLogger.Collect(msg) // TODO: carefully check message meta and batch meta confusion situation + statsLogger.Collect(msg) // Just save session data into db without additional checks if err := saver.InsertMessage(msg); err != nil { @@ -127,33 +127,36 @@ func main() { signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) commitTick := time.Tick(cfg.CommitBatchTimeout) + + // Send collected batches to db + commitDBUpdates := func() { + start := time.Now() + pg.CommitBatches() + pgDur := time.Now().Sub(start).Milliseconds() + + start = time.Now() + if err := saver.CommitStats(); err != nil { + log.Printf("Error on stats commit: %v", err) + } + chDur := time.Now().Sub(start).Milliseconds() + log.Printf("commit duration(ms), pg: %d, ch: %d", pgDur, chDur) + + if err := consumer.Commit(); err != nil { + log.Printf("Error on consumer commit: %v", err) + } + } for { select { case sig := <-sigchan: - log.Printf("Caught signal %v: terminating\n", sig) + log.Printf("Caught signal %s: terminating\n", sig.String()) + commitDBUpdates() consumer.Close() os.Exit(0) case <-commitTick: - // Send collected batches to db - start := time.Now() - pg.CommitBatches() - pgDur := time.Now().Sub(start).Milliseconds() - - start = time.Now() - if err := saver.CommitStats(); err != nil { - log.Printf("Error on stats commit: %v", err) - } - chDur := time.Now().Sub(start).Milliseconds() - log.Printf("commit duration(ms), pg: %d, ch: %d", pgDur, chDur) - - // TODO: use commit worker to save time each tick - if err := consumer.Commit(); err != nil { - log.Printf("Error on consumer commit: %v", err) - } + commitDBUpdates() default: // Handle new message from queue - err := consumer.ConsumeNext() - if err != nil { + if err := consumer.ConsumeNext(); err != nil { log.Fatalf("Error on consumption: %v", err) } } diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index b698bb13b..5952aa7cc 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -46,7 +46,7 @@ func main() { log.Printf("ZERO TS, sessID: %d, msgType: %d", msg.Meta().SessionID(), msg.TypeID()) } statsLogger.Collect(msg) - sessions.UpdateSession(msg) //TODO: recheck timestamps(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp) + sessions.UpdateSession(msg) } consumer := queue.NewConsumer( diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go index 82510fdb7..9e4804089 100644 --- a/backend/cmd/heuristics/main.go +++ b/backend/cmd/heuristics/main.go @@ -49,7 +49,7 @@ func main() { msgHandler := 
func(msg messages.Message) { statsLogger.Collect(msg) - builderMap.HandleMessage(msg) //(sessionID, msg, iter.Message().Meta().Index) + builderMap.HandleMessage(msg) } consumer := queue.NewConsumer( diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 9bfa4d7a2..4f5a30dcf 100644 --- a/backend/cmd/integrations/main.go +++ b/backend/cmd/integrations/main.go @@ -17,7 +17,6 @@ import ( "openreplay/backend/pkg/token" ) -// func main() { metrics := monitoring.New("integrations") diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 0b82c3130..71f455275 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -115,6 +115,9 @@ func main() { select { case sig := <-sigchan: log.Printf("Caught signal %v: terminating\n", sig) + if err := writer.SyncAll(); err != nil { + log.Printf("sync error: %v\n", err) + } if err := consumer.Commit(); err != nil { log.Printf("can't commit messages: %s", err) } @@ -122,7 +125,7 @@ func main() { os.Exit(0) case <-tick: if err := writer.SyncAll(); err != nil { - log.Fatalf("Sync error: %v\n", err) + log.Fatalf("sync error: %v\n", err) } counter.Print() if err := consumer.Commit(); err != nil { From 0d26b1cc9a1fcadffaedf775268985480eef14c7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 28 Sep 2022 17:44:05 +0200 Subject: [PATCH 062/592] feat(chalice): session mobsUrl exp feat(chalice): notes in replay response --- api/chalicelib/core/sessions.py | 8 ++++--- api/chalicelib/core/sessions_favorite.py | 19 ++++++++------- api/routers/core_dynamic.py | 13 ++++++----- ee/api/chalicelib/core/sessions.py | 8 ++++--- ee/api/chalicelib/core/sessions_exp.py | 16 +++++++++---- ee/api/chalicelib/core/sessions_favorite.py | 26 ++++++++++----------- ee/api/routers/core_dynamic.py | 9 ++++--- 7 files changed, 55 insertions(+), 44 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 5b43ddbd1..7189146a6 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -3,7 +3,7 @@ from typing import List import schemas from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ - sessions_devtool + sessions_devtool, sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, @@ -40,8 +40,8 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False, - live=True): +def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] if include_fav_viewed: @@ -100,6 +100,8 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=tenant_id, project_id=project_id, + session_id=session_id, user_id=user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git 
a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 691e5ec3e..41d241b4d 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -2,7 +2,7 @@ from chalicelib.core import sessions from chalicelib.utils import pg_client -def add_favorite_session(project_id, user_id, session_id): +def add_favorite_session(tenant_id, project_id, user_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -10,11 +10,11 @@ def add_favorite_session(project_id, user_id, session_id): VALUES (%(userId)s,%(session_id)s);""", {"userId": user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + return sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + full_data=False, include_fav_viewed=True) -def remove_favorite_session(project_id, user_id, session_id): +def remove_favorite_session(tenant_id, project_id, user_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -23,15 +23,16 @@ def remove_favorite_session(project_id, user_id, session_id): AND session_id = %(session_id)s;""", {"userId": user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True) + return sessions.get_by_id2_pg(tenant_id=tenant_id, project_id=project_id, session_id=session_id, user_id=user_id, + full_data=False, include_fav_viewed=True) -def favorite_session(project_id, user_id, session_id): +def favorite_session(tenant_id, project_id, user_id, session_id): if favorite_session_exists(user_id=user_id, session_id=session_id): - return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return remove_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, + session_id=session_id) - return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return add_favorite_session(tenant_id=tenant_id, project_id=project_id, user_id=user_id, session_id=session_id) def favorite_session_exists(user_id, session_id): diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index d2357b319..326d31ab9 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -175,8 +175,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, - include_fav_viewed=True, group_metadata=True) + data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, + full_data=True, user_id=context.user_id, include_fav_viewed=True, group_metadata=True) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -265,8 +265,9 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, 
group_metadata=True, live=False) + data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, + full_data=True, user_id=context.user_id, include_fav_viewed=True, + group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -328,8 +329,8 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return { - "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId)} + "data": sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id, session_id=sessionId)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 0c908c500..18da3e200 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -4,7 +4,7 @@ import schemas import schemas_ee from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \ - sessions_devtool + sessions_devtool, sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, @@ -41,7 +41,7 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentContext, full_data=False, +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] @@ -65,7 +65,7 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -102,6 +102,8 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 81953bcc5..20c0db9dd 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -3,7 +3,8 @@ from typing import List, Union import schemas import schemas_ee from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics + sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics, sessions_devtool, \ + 
sessions_notes from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper SESSION_PROJECTION_COLS_CH = """\ @@ -58,8 +59,8 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False, - live=True): +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] if include_fav_viewed: @@ -82,7 +83,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -112,11 +113,16 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ :500] # limit the number of errors to reduce the response-body size data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) data['live'] = live and assist.is_live(project_id=project_id, diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index c3128cd03..c1616e0f9 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -5,35 +5,35 @@ from chalicelib.core import sessions, sessions_favorite_exp from chalicelib.utils import pg_client, s3_extra -def add_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def add_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) VALUES (%(userId)s,%(sessionId)s);""", - {"userId": user_id, "sessionId": session_id}) + {"userId": context.user_id, "sessionId": session_id}) ) - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True, context=context) + sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, + full_data=False, 
include_fav_viewed=True, context=context) -def remove_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def remove_favorite_session(project_id, session_id, context: schemas_ee.CurrentContext): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s AND session_id = %(sessionId)s;""", - {"userId": user_id, "sessionId": session_id}) + {"userId": context.user_id, "sessionId": session_id}) ) - sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False, - include_fav_viewed=True, context=context) + sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, + full_data=False, include_fav_viewed=True, context=context) -def favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext): +def favorite_session(tenant_id, project_id, user_id, session_id, context: schemas_ee.CurrentContext): if favorite_session_exists(user_id=user_id, session_id=session_id): key = str(session_id) try: @@ -47,7 +47,7 @@ def favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren except Exception as e: print(f"!!!Error while tagging: {key} to default") print(str(e)) - return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id) + return remove_favorite_session(project_id=project_id, session_id=session_id, context=context) key = str(session_id) try: s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault')) @@ -60,7 +60,7 @@ def favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren except Exception as e: print(f"!!!Error while tagging: {key} to vault") print(str(e)) - return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, context=context) + return add_favorite_session(project_id=project_id, session_id=session_id, context=context) def favorite_session_exists(user_id, session_id): diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index ee3c3a83f..d06467cdd 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -183,7 +183,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} @@ -275,8 +275,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False, - context=context) + include_fav_viewed=True, group_metadata=True, live=False, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): 
@@ -344,8 +343,8 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas_ee.CurrentContext = Depends(OR_context)): return { - "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id, - session_id=sessionId, context=context)} + "data": sessions_favorite.favorite_session(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id, session_id=sessionId, context=context)} @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], From 15549050a066487ca446306065206235449fb38c Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Thu, 29 Sep 2022 11:12:52 +0200 Subject: [PATCH 063/592] fix(tracker): 4.1.3: static build (next/nuxt etc) fix --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/utils.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 071a5c409..7bbf664ce 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.2", + "version": "4.1.3", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/utils.ts b/tracker/tracker/src/main/utils.ts index 0bbe09e33..dfff3d8e5 100644 --- a/tracker/tracker/src/main/utils.ts +++ b/tracker/tracker/src/main/utils.ts @@ -7,7 +7,7 @@ export const IS_FIREFOX = IN_BROWSER && navigator.userAgent.match(/firefox|fxios export const MAX_STR_LEN = 1e5 const navigationStart: number | false = - (IN_BROWSER && performance.timing.navigationStart) || performance.timeOrigin + IN_BROWSER && (performance.timing.navigationStart || performance.timeOrigin) // performance.now() is buggy in some browsers export const timestamp: () => number = IN_BROWSER && performance.now() && navigationStart From 19d616dcd381f6292b89bf43fbbc89115228be1d Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 29 Sep 2022 12:09:32 +0200 Subject: [PATCH 064/592] chore(helm): Adding vars.yaml Signed-off-by: rjshrjndrn --- scripts/helmcharts/vars.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index dd1f36883..a308a1ea1 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -138,6 +138,9 @@ global: # storage: # pvc: # name: mysharedpersistence +# chalice: +# pvc: +# name: mysharedpersistence chalice: env: From c3913e6dca3fc42638d24ed4bc9736f0229d179f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 29 Sep 2022 15:34:37 +0200 Subject: [PATCH 065/592] feat(chalice): changes --- api/chalicelib/core/sessions.py | 11 +++++------ api/chalicelib/core/sessions_notes.py | 9 +++++---- api/routers/core_dynamic.py | 4 ++-- ee/api/chalicelib/core/sessions_notes.py | 6 ++++-- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 7189146a6..6361c08c7 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -40,7 +40,7 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, include_fav_viewed=False, +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: 
extra_query = [] @@ -64,7 +64,7 @@ def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, i FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": user_id} + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) # print("===============") # print(query) @@ -100,12 +100,11 @@ def get_by_id2_pg(tenant_id, project_id, session_id, user_id, full_data=False, i data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, start_ts=data["startTs"], duration=data["duration"]) - data['notes'] = sessions_notes.get_session_notes(tenant_id=tenant_id, project_id=project_id, - session_id=session_id, user_id=user_id) + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, - session_id=session_id, + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) data["inDB"] = True return data diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index c1eec5dfc..da2305981 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -55,8 +55,10 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) - result = cur.fetchone() - return helper.dict_to_camel_case(result) + result = helper.dict_to_camel_case(cur.fetchone()) + if result: + result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"]) + return result def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): @@ -94,8 +96,7 @@ def delete(tenant_id, user_id, project_id, note_id): cur.execute( cur.mogrify("""\ UPDATE public.sessions_notes - SET - deleted_at = timezone('utc'::text, now()) + SET deleted_at = timezone('utc'::text, now()) WHERE note_id = %(note_id)s AND project_id = %(project_id)s\ diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 326d31ab9..2cb1a7c51 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -175,8 +175,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, - full_data=True, user_id=context.user_id, include_fav_viewed=True, group_metadata=True) + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 6c66ebcf5..8dc8411ca 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ 
b/ee/api/chalicelib/core/sessions_notes.py @@ -55,8 +55,10 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote RETURNING *;""", {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) cur.execute(query) - result = cur.fetchone() - return helper.dict_to_camel_case(result) + result = helper.dict_to_camel_case(cur.fetchone()) + if result: + result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"]) + return result def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema): From 5077843305c33d13b8ac6af0a46a9ca58dda9b9f Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 29 Sep 2022 16:16:13 +0200 Subject: [PATCH 066/592] chore(helm): Adding variables Signed-off-by: rjshrjndrn --- .../openreplay/charts/assist/templates/deployment.yaml | 2 ++ .../openreplay/charts/chalice/templates/deployment.yaml | 2 ++ scripts/helmcharts/vars.yaml | 1 + 3 files changed, 5 insertions(+) diff --git a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml index b5509775d..6daf5a79f 100644 --- a/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/templates/deployment.yaml @@ -42,6 +42,8 @@ spec: {{- .Values.healthCheck | toYaml | nindent 10}} {{- end}} env: + - name: ASSIST_JWT_SECRET + value: {{ .Values.global.assistJWTSecret }} - name: ASSIST_KEY value: {{ .Values.global.assistKey }} - name: AWS_DEFAULT_REGION diff --git a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml index 7474a7eed..6f7f36631 100644 --- a/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml +++ b/scripts/helmcharts/openreplay/charts/chalice/templates/deployment.yaml @@ -42,6 +42,8 @@ spec: {{- .Values.healthCheck | toYaml | nindent 10}} {{- end}} env: + - name: ASSIST_JWT_SECRET + value: {{ .Values.global.assistJWTSecret }} - name: ASSIST_KEY value: {{ .Values.global.assistKey }} - name: LICENSE_KEY diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index a308a1ea1..50996a651 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -101,6 +101,7 @@ global: openReplayContainerRegistry: "public.ecr.aws/p1t3u8a3" # secret key to inject to assist and peers service assistKey: "SetARandomStringHere" + assistJWTSecret: "SetARandomStringHere" s3: region: "us-east-1" endpoint: "http://minio.db.svc.cluster.local:9000" From 224541a0c2987037e69f3546ec6593fa48081256 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 29 Sep 2022 18:06:51 +0200 Subject: [PATCH 067/592] feat(chalice): notes pagination feat(chalice): notes sort --- api/chalicelib/core/sessions_notes.py | 5 +++-- api/routers/core_dynamic.py | 7 ++++--- api/schemas.py | 8 ++++++++ ee/api/chalicelib/core/sessions_notes.py | 5 +++-- ee/api/routers/core_dynamic.py | 8 +++++--- 5 files changed, 23 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index da2305981..ab3dbd5a1 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -28,7 +28,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): return rows -def get_all_notes(tenant_id, project_id, user_id): +def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: 
schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes @@ -37,7 +37,8 @@ def get_all_notes(tenant_id, project_id, user_id): AND sessions_notes.deleted_at IS NULL AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public) - ORDER BY created_at DESC;""", + ORDER BY created_at {data.order} + LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) cur.execute(query=query) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 2cb1a7c51..2695a6b09 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -419,9 +419,10 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data -@app.get('/{projectId}/notes', tags=["sessions", "notes"]) -def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) +@app.post('/{projectId}/notes', tags=["sessions", "notes"]) +def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,data=data) if "errors" in data: return data return { diff --git a/api/schemas.py b/api/schemas.py index 9be29e84a..b18550dcd 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1086,6 +1086,14 @@ class IntegrationType(str, Enum): newrelic = "NEWRELIC" +class SearchNoteSchema(_PaginatedSchema): + sort: str = Field(default="createdAt") + order: SortOrderType = Field(default=SortOrderType.desc) + + class Config: + alias_generator = attribute_to_camel_case + + class SessionNoteSchema(BaseModel): message: str = Field(..., min_length=2) tags: List[str] = Field(default=[]) diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 8dc8411ca..918b5da0a 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -28,7 +28,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): return rows -def get_all_notes(tenant_id, project_id, user_id): +def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes @@ -37,7 +37,8 @@ def get_all_notes(tenant_id, project_id, user_id): AND sessions_notes.deleted_at IS NULL AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) - ORDER BY created_at DESC;""", + ORDER BY created_at {data.order} + LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) cur.execute(query=query) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index d06467cdd..176896ebb 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -446,9 +446,11 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D return data -@app.get('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) -def get_all_notes(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = 
sessions_notes.get_all_notes(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id) +@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) +def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId, + user_id=context.user_id) if "errors" in data: return data return { From 1890ff2af1dc5d9818474e33ddbf56814dfa4118 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 30 Sep 2022 11:38:23 +0200 Subject: [PATCH 068/592] fix(tracker): no sess-reset on start --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/index.ts | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 7bbf664ce..8d60a0cee 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "4.1.3", + "version": "4.1.4", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 338184b59..e52cd2dc2 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -159,7 +159,7 @@ export default class App { this._debug('worker_failed', {}) // add context (from worker) } else if (data === 'restart') { this.stop(false) - this.start({ forceNew: true }) + this.start({ forceNew: true }) // TODO: keep userID & metadata (draw scenarios) } } const alertWorker = () => { @@ -447,7 +447,8 @@ export default class App { ) { return Promise.reject(`Incorrect server response: ${JSON.stringify(r)}`) } - if (sessionID !== this.session.getInfo().sessionID) { + const prevSessionID = this.session.getInfo().sessionID + if (prevSessionID && prevSessionID !== sessionID) { this.session.reset() } this.session.setSessionToken(token) From e81e82d075d895b8e072ce647bba4b0d7017a68b Mon Sep 17 00:00:00 2001 From: sylenien Date: Fri, 30 Sep 2022 11:06:40 +0200 Subject: [PATCH 069/592] change(ui): change session urls --- frontend/app/duck/assignments.js | 10 +++++----- frontend/app/duck/issues.js | 10 +++++----- frontend/app/duck/sessions.js | 10 +++++----- frontend/app/duck/tests/runs.js | 4 ++-- frontend/app/services/SessionService.ts | 2 +- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/frontend/app/duck/assignments.js b/frontend/app/duck/assignments.js index 0892df161..896484899 100644 --- a/frontend/app/duck/assignments.js +++ b/frontend/app/duck/assignments.js @@ -48,7 +48,7 @@ const reducer = (state = initialState, action = {}) => { return state.set('activeIssue', Assignment({ ...action.data, users})); case FETCH_META.SUCCESS: issueTypes = action.data.issueTypes - var issueTypeIcons = {} + var issueTypeIcons = {} issueTypes.forEach(iss => { issueTypeIcons[iss.id] = iss.iconUrl }) @@ -96,14 +96,14 @@ export function fetchMeta(projectId) { export function fetchAssignments(sessionId) { return { types: FETCH_ASSIGNMENTS.toArray(), - call: client => client.get(`/sessions2/${ sessionId }/assign`) + call: client => client.get(`/sessions/${ sessionId }/assign`) } } export function fetchAssigment(sessionId, id) { return { types: FETCH_ASSIGNMENT.toArray(), - call: client => client.get(`/sessions2/${ sessionId }/assign/${ id }`) + call: client => 
client.get(`/sessions/${ sessionId }/assign/${ id }`) } } @@ -111,13 +111,13 @@ export function addActivity(sessionId, params) { const data = { ...params, assignee: params.assignee.value, issueType: params.issueType.value } return { types: ADD_ACTIVITY.toArray(), - call: client => client.post(`/sessions2/${ sessionId }/assign/projects/${params.projectId.value}`, data), + call: client => client.post(`/sessions/${ sessionId }/assign/projects/${params.projectId.value}`, data), } } export function addMessage(sessionId, assignmentId, params) { return { types: ADD_MESSAGE.toArray(), - call: client => client.post(`/sessions2/${ sessionId }/assign/${ assignmentId }/comment`, params), + call: client => client.post(`/sessions/${ sessionId }/assign/${ assignmentId }/comment`, params), } } diff --git a/frontend/app/duck/issues.js b/frontend/app/duck/issues.js index fcd6d9584..6c2c70733 100644 --- a/frontend/app/duck/issues.js +++ b/frontend/app/duck/issues.js @@ -84,7 +84,7 @@ export const edit = createEdit(name); export function fetchAssignments(sessionId) { return { types: FETCH_ASSIGNMENTS.toArray(), - call: client => client.get(`/sessions2/${ sessionId }/assign`) + call: client => client.get(`/sessions/${ sessionId }/assign`) } } @@ -104,7 +104,7 @@ export function fetchProjects() { export function fetchIssue(sessionId, id) { return { types: FETCH_ISSUE.toArray(), - call: client => client.get(`/sessions2/${ sessionId }/assign/jira/${ id }`) + call: client => client.get(`/sessions/${ sessionId }/assign/jira/${ id }`) } } @@ -118,13 +118,13 @@ export function fetchMeta(projectId) { export function addActivity(sessionId, params) { return { types: ADD_ACTIVITY.toArray(), - call: client => client.post(`/sessions2/${ sessionId }/assign`, params.toCreate()), + call: client => client.post(`/sessions/${ sessionId }/assign`, params.toCreate()), } } export function addMessage(sessionId, assignmentId, params) { return { types: ADD_MESSAGE.toArray(), - call: client => client.post(`/sessions2/${ sessionId }/assign/${ assignmentId }/comment`, params), + call: client => client.post(`/sessions/${ sessionId }/assign/${ assignmentId }/comment`, params), } -} \ No newline at end of file +} diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index d2556fce3..fe8927097 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -241,7 +241,7 @@ export const fetchList = setSessionFilter(cleanSessionFilters(params)); return dispatch({ types: FETCH_LIST.toArray(), - call: (client) => client.post('/sessions/search2', params), + call: (client) => client.post('/sessions/search', params), params: cleanParams(params), }); }; @@ -249,7 +249,7 @@ export const fetchList = export function fetchErrorStackList(sessionId, errorId) { return { types: FETCH_ERROR_STACK.toArray(), - call: (client) => client.get(`/sessions2/${sessionId}/errors/${errorId}/sourcemaps`), + call: (client) => client.get(`/sessions/${sessionId}/errors/${errorId}/sourcemaps`), }; } @@ -258,7 +258,7 @@ export const fetch = (dispatch, getState) => { dispatch({ types: FETCH.toArray(), - call: (client) => client.get(isLive ? `/assist/sessions/${sessionId}` : `/sessions2/${sessionId}`), + call: (client) => client.get(isLive ? 
`/assist/sessions/${sessionId}` : `/sessions/${sessionId}`), filter: getState().getIn(['filters', 'appliedFilter']), }); }; @@ -266,7 +266,7 @@ export const fetch = export function toggleFavorite(sessionId) { return { types: TOGGLE_FAVORITE.toArray(), - call: (client) => client.get(`/sessions2/${sessionId}/favorite`), + call: (client) => client.get(`/sessions/${sessionId}/favorite`), sessionId, }; } @@ -274,7 +274,7 @@ export function toggleFavorite(sessionId) { export function fetchFavoriteList() { return { types: FETCH_FAVORITE_LIST.toArray(), - call: (client) => client.get('/sessions2/favorite'), + call: (client) => client.get('/sessions/favorite'), }; } diff --git a/frontend/app/duck/tests/runs.js b/frontend/app/duck/tests/runs.js index 19aed0fd0..e5f1ecc3d 100644 --- a/frontend/app/duck/tests/runs.js +++ b/frontend/app/duck/tests/runs.js @@ -40,7 +40,7 @@ const reducer = (state = initialState, action = {}) => { const test = state.get('list').find(({ testId }) => testId === action.testId); const run = Run({ runId: action.data.id, state: RUNNING, testId: action.testId, name: test.name - }); + }); return updateRun(state, action.testId, run); } case STOP_RUN.SUCCESS: { @@ -73,7 +73,7 @@ export default reduceDucks({ reducer, initialState }, requestDuck); export function generateTest(sessionId, params) { return { types: GEN_TEST.toArray(), - call: client => client.post(`/sessions2/${ sessionId }/gentest`, params), + call: client => client.post(`/sessions/${ sessionId }/gentest`, params), }; } diff --git a/frontend/app/services/SessionService.ts b/frontend/app/services/SessionService.ts index 07c623359..01964e41a 100644 --- a/frontend/app/services/SessionService.ts +++ b/frontend/app/services/SessionService.ts @@ -25,7 +25,7 @@ export default class SettingsService { getSessions(filter: any) { return this.client - .post('/sessions/search2', filter) + .post('/sessions/search', filter) .then(fetchErrorCheck) .then((response) => response.data || []); } From 0c79993b655764e87d9b1b473f5d2b68fe469190 Mon Sep 17 00:00:00 2001 From: sylenien Date: Fri, 30 Sep 2022 12:35:02 +0200 Subject: [PATCH 070/592] change(ui): change jump button position, add diffs and tte titles --- .../components/Session_/BottomBlock/Header.js | 3 +- .../components/Session_/Storage/DiffRow.tsx | 4 +- .../components/Session_/Storage/Storage.js | 83 ++++++++----------- .../Session_/Storage/storage.module.css | 5 +- 4 files changed, 42 insertions(+), 53 deletions(-) diff --git a/frontend/app/components/Session_/BottomBlock/Header.js b/frontend/app/components/Session_/BottomBlock/Header.js index 15dd7a0c9..15cdf3365 100644 --- a/frontend/app/components/Session_/BottomBlock/Header.js +++ b/frontend/app/components/Session_/BottomBlock/Header.js @@ -11,9 +11,10 @@ const Header = ({ closeBottomBlock, onFilterChange, showClose = true, + customStyle, ...props }) => ( -
    +
    { children }
    { showClose && } diff --git a/frontend/app/components/Session_/Storage/DiffRow.tsx b/frontend/app/components/Session_/Storage/DiffRow.tsx index 72a9bed46..4dbd39345 100644 --- a/frontend/app/components/Session_/Storage/DiffRow.tsx +++ b/frontend/app/components/Session_/Storage/DiffRow.tsx @@ -7,14 +7,14 @@ interface Props { diff: Record; } -function DiffRow({ diff, path, pathRoot, shades }: Props) { +function DiffRow({ diff, path }: Props) { const [shorten, setShorten] = React.useState(true); const oldValue = diff.item ? JSON.stringify(diff.item.lhs) : JSON.stringify(diff.lhs); const newValue = diff.item ? JSON.stringify(diff.item.rhs) : JSON.stringify(diff.rhs); const pathStr = path.length > 15 && shorten ? path.slice(0, 5) + '...' + path.slice(10) : path; return ( -
    +
    15 ? 'cursor-pointer' : ''} onClick={() => setShorten(false)}> {pathStr} {': '} diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index f4f2cd80b..f159ab8ce 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -17,6 +17,7 @@ import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; import cn from 'classnames'; import stl from './storage.module.css'; +import { Tooltip } from 'react-tippy' // const STATE = 'STATE'; // const DIFF = 'DIFF'; @@ -25,7 +26,6 @@ import stl from './storage.module.css'; function getActionsName(type) { switch (type) { case STORAGE_TYPES.MOBX: - return 'MUTATIONS'; case STORAGE_TYPES.VUEX: return 'MUTATIONS'; default: @@ -33,26 +33,6 @@ function getActionsName(type) { } } -const PATH_BGS = [ - '255, 173, 173', - '202, 255, 191', - '155, 246, 255', - '255, 198, 255', - '160, 196, 255', - '251, 248, 204', - '253, 228, 207', - '255, 207, 210', - '241, 192, 232', - '207, 186, 240', - '163, 196, 243', - '144, 219, 244', - '142, 236, 245', - '152, 245, 225', - '185, 251, 192', -]; - -const buildBg = (shade) => `rgba(${shade}, 0.2)`; - @connectPlayer((state) => ({ type: selectStorageType(state), list: selectStorageList(state), @@ -68,9 +48,8 @@ const buildBg = (shade) => `rgba(${shade}, 0.2)`; ) //@withEnumToggle('activeTab', 'setActiveTab', DIFF) export default class Storage extends React.PureComponent { - pathShades = {}; - lastBtnRef = React.createRef(); + state = { showDiffs: false }; focusNextButton() { if (this.lastBtnRef.current) { @@ -105,39 +84,33 @@ export default class Storage extends React.PureComponent { } return ( -
    +
    {stateDiff.map((d, i) => this.renderDiffs(d, i))}
    ); } renderDiffs(diff, i) { - const [path, pathRoot] = this.createPathAndBg(diff); + const path = this.createPath(diff); return ( - + ); } - createPathAndBg = (diff) => { + createPath = (diff) => { let path = []; - let pathRoot; if (diff.path) { path = path.concat(diff.path); - pathRoot = diff.path[0]; - if (!this.pathShades[pathRoot]) { - const randomShade = PATH_BGS[Math.floor(Math.random() * PATH_BGS.length)]; - this.pathShades[pathRoot] = buildBg(randomShade); - } } if (typeof diff.index !== 'undefined') { path.push(diff.index); } const pathStr = path.length ? path.join('.') : ''; - return [pathStr, pathRoot]; + return pathStr; }; ensureString(actionType) { @@ -182,6 +155,10 @@ export default class Storage extends React.PureComponent { name = item.mutation.join(''); } + if (src !== null && !this.state.showDiffs) { + this.setState({ showDiffs: true }) + } + return (
    )} -
    - {i + 1 < listNow.length && ( - - )} - {i + 1 === listNow.length && i + 1 < list.length && ( - - )} +
    {typeof item.duration === 'number' && (
    {formatMs(item.duration)}
    )} +
    + {i + 1 < listNow.length && ( + + )} + {i + 1 === listNow.length && i + 1 < list.length && ( + + )} +
    ); @@ -233,8 +212,18 @@ export default class Storage extends React.PureComponent { {list.length > 0 && (
    - {showStore &&

    {'STORE'}

    } -

    {getActionsName(type)}

    + {showStore &&

    {'STATE'}

    } + {this.state.showDiffs ? ( +

    + DIFFS +

    + ) : null} +

    {getActionsName(type)}

    +

    + + TTE + +

    )}
    diff --git a/frontend/app/components/Session_/Storage/storage.module.css b/frontend/app/components/Session_/Storage/storage.module.css index 55f04d5db..d34ab27b9 100644 --- a/frontend/app/components/Session_/Storage/storage.module.css +++ b/frontend/app/components/Session_/Storage/storage.module.css @@ -1,8 +1,7 @@ .button { - padding: 2px 6px; + padding: 3px 6px; cursor: pointer; - width: 60px; border-radius: 3px; color: $gray-light; &:hover { @@ -14,4 +13,4 @@ font-size: 12px; margin-right: 5px; -} \ No newline at end of file +} From cc24b1368652f10055369d5211e5932f7a0942a5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 15:41:47 +0200 Subject: [PATCH 071/592] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 17 ++++++++++------- api/schemas.py | 1 + ee/api/chalicelib/core/sessions_notes.py | 15 ++++++++++----- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index ab3dbd5a1..77e487df3 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -2,6 +2,7 @@ import json import schemas from chalicelib.core import users +from chalicelib.core.sessions import _multiple_conditions, _multiple_values from chalicelib.utils import pg_client, helper, dev from chalicelib.utils.TimeUTC import TimeUTC @@ -10,7 +11,6 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes - INNER JOIN users USING (user_id) WHERE sessions_notes.project_id = %(project_id)s AND sessions_notes.deleted_at IS NULL AND sessions_notes.session_id = %(session_id)s @@ -30,16 +30,19 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", + "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)"] + extra_params = {} + if data.tags and len(data.tags) > 0: + k = "tag" + conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = _multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes - INNER JOIN users USING (user_id) - WHERE sessions_notes.project_id = %(project_id)s - AND sessions_notes.deleted_at IS NULL - AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public) + WHERE {" AND ".join(conditions)} ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", - {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) cur.execute(query=query) rows = cur.fetchall() diff --git a/api/schemas.py b/api/schemas.py index b18550dcd..db5c0cf82 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1089,6 +1089,7 @@ class IntegrationType(str, Enum): class SearchNoteSchema(_PaginatedSchema): sort: str = Field(default="createdAt") order: SortOrderType = Field(default=SortOrderType.desc) + tags: Optional[List[str]] = Field(default=[]) class Config: alias_generator = attribute_to_camel_case diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 918b5da0a..baf71526d 100644 --- 
a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -2,6 +2,7 @@ import json import schemas from chalicelib.core import users +from chalicelib.core.sessions import _multiple_conditions, _multiple_values from chalicelib.utils import pg_client, helper, dev from chalicelib.utils.TimeUTC import TimeUTC @@ -30,16 +31,20 @@ def get_session_notes(tenant_id, project_id, session_id, user_id): def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema): with pg_client.PostgresClient() as cur: + conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL", + "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)"] + extra_params = {} + if data.tags and len(data.tags) > 0: + k = "tag" + conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = _multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes INNER JOIN users USING (user_id) - WHERE sessions_notes.project_id = %(project_id)s - AND sessions_notes.deleted_at IS NULL - AND (sessions_notes.user_id = %(user_id)s - OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s) + WHERE {" AND ".join(conditions)} ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", - {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}) + {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) cur.execute(query=query) rows = cur.fetchall() From c21aa47f6e470fb07fa1c4da0d784069e752c5fa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 15:54:02 +0200 Subject: [PATCH 072/592] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 11 ++++------- ee/api/chalicelib/core/sessions_notes.py | 11 ++++------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 77e487df3..d7b29fbb3 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,6 @@ -import json - import schemas -from chalicelib.core import users -from chalicelib.core.sessions import _multiple_conditions, _multiple_values -from chalicelib.utils import pg_client, helper, dev +from chalicelib.core import sessions +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -35,8 +32,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) - extra_params = _multiple_values(data.tags, value_key=k) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes WHERE {" AND ".join(conditions)} diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index baf71526d..fe26e23ac 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -1,9 +1,6 @@ -import json - import schemas -from chalicelib.core import users -from chalicelib.core.sessions import _multiple_conditions, _multiple_values -from 
chalicelib.utils import pg_client, helper, dev +from chalicelib.core import sessions +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -36,8 +33,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(_multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) - extra_params = _multiple_values(data.tags, value_key=k) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes INNER JOIN users USING (user_id) From a1cfc3db8fccbc0c48fdc93f0fa8eac6563ba4ba Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 16:02:11 +0200 Subject: [PATCH 073/592] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index d7b29fbb3..04855c591 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index fe26e23ac..36aff24c2 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -33,7 +33,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.issue_types)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes From 91202cfa38478a7bc334c44bf338055f93a18acf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 16:06:05 +0200 Subject: [PATCH 074/592] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 04855c591..a82ff9975 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -31,7 +31,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)"] extra_params = {} if data.tags and len(data.tags) > 0: - k = "tag" + k = "tag_value" conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = 
cur.mogrify(f"""SELECT sessions_notes.* diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 36aff24c2..4b2826dfd 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)"] extra_params = {} if data.tags and len(data.tags) > 0: - k = "tag" + k = "tag_value" conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* From 6f190f3a81d30751b19aa20335216eae28bbd642 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 17:02:17 +0200 Subject: [PATCH 075/592] feat(chalice): jwt expiration changes --- api/chalicelib/core/assist.py | 2 +- api/chalicelib/core/authorizers.py | 2 +- api/env.default | 4 ++-- ee/api/env.default | 6 ++++-- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index b4fc3a9f8..ebf1b7ab8 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -89,7 +89,7 @@ def __get_agent_token(project_id, project_key, session_id): "projectId": project_id, "sessionId": session_id, "iat": iat // 1000, - "exp": iat // 1000 + config("JWT_EXP_DELTA_SECONDS", cast=int) + TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("JWT_ISSUER"), "aud": f"openreplay:agent" }, diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index a474fcb8d..2ec3fa01f 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -42,7 +42,7 @@ def generate_jwt(id, tenant_id, iat, aud): payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("JWT_ISSUER"), "iat": iat // 1000, "aud": aud diff --git a/api/env.default b/api/env.default index 703cdc887..676feb541 100644 --- a/api/env.default +++ b/api/env.default @@ -18,7 +18,7 @@ change_password_link=/reset-password?invitation=%s&&pass=%s invitation_link=/api/users/invitation?token=%s js_cache_bucket=sessions-assets jwt_algorithm=HS512 -JWT_EXP_DELTA_SECONDS=2592000 +JWT_EXPIRATION=2592000 JWT_ISSUER=openreplay-oss jwt_secret="SET A RANDOM STRING HERE" ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s @@ -48,5 +48,5 @@ SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 -ASSIST_JWT_EXPIRATION=1800 +ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file diff --git a/ee/api/env.default b/ee/api/env.default index 8f0765d92..94037cf1a 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -28,7 +28,7 @@ idp_x509cert= invitation_link=/api/users/invitation?token=%s js_cache_bucket=sessions-assets jwt_algorithm=HS512 -JWT_EXP_DELTA_SECONDS=2592000 +JWT_EXPIRATION=2592000 JWT_ISSUER=openreplay-ee jwt_secret="SET A RANDOM STRING HERE" ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s @@ 
-66,4 +66,6 @@ EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob -PRESIGNED_URL_EXPIRATION=3600 \ No newline at end of file +PRESIGNED_URL_EXPIRATION=3600 +ASSIST_JWT_EXPIRATION=144000 +ASSIST_JWT_SECRET= \ No newline at end of file From 71fb4e9e83989aabcadb5426a4ad64b7f1f76e81 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 17:19:56 +0200 Subject: [PATCH 076/592] feat(chalice): search notes by tags --- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index a82ff9975..f0e7bfb48 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -32,7 +32,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 4b2826dfd..df0e8bfa6 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -33,7 +33,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se extra_params = {} if data.tags and len(data.tags) > 0: k = "tag_value" - conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (s.tags)", data.tags, value_key=k)) + conditions.append(sessions._multiple_conditions(f"%({k})s = ANY (sessions_notes.tags)", data.tags, value_key=k)) extra_params = sessions._multiple_values(data.tags, value_key=k) query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes From 3484e128840bbc8b375919c587642c0d498d0b2d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 18:25:08 +0200 Subject: [PATCH 077/592] feat(chalice): changed devtools URL pattern --- api/env.default | 2 +- ee/api/env.default | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/env.default b/api/env.default index 676feb541..2dcafbc8a 100644 --- a/api/env.default +++ b/api/env.default @@ -46,7 +46,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file diff --git a/ee/api/env.default b/ee/api/env.default index 94037cf1a..98c94c9b5 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -65,7 +65,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= 
\ No newline at end of file From 223d3ea751bdb28dec1054629c6078315e9b9e2e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 30 Sep 2022 19:21:45 +0200 Subject: [PATCH 078/592] feat(chalice): changed update member --- api/chalicelib/core/users.py | 73 +++++++++++++++++++++++++-- api/routers/core_dynamic.py | 2 +- ee/api/chalicelib/core/users.py | 87 +++++++++++++++++++++++++++++++-- ee/api/routers/core_dynamic.py | 4 +- 4 files changed, 153 insertions(+), 13 deletions(-) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 9af070fc5..b8b3e9898 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -252,9 +252,8 @@ def generate_new_api_key(user_id): cur.mogrify( f"""UPDATE public.users SET api_key=generate_api_key(20) - WHERE - users.user_id = %(userId)s - AND deleted_at IS NULL + WHERE users.user_id = %(userId)s + AND deleted_at IS NULL RETURNING api_key;""", {"userId": user_id}) ) @@ -295,6 +294,39 @@ def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_i return {"data": user} +def edit_member(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id): + user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: + admin = get(tenant_id=tenant_id, user_id=editor_id) + if not admin["superAdmin"] and not admin["admin"]: + return {"errors": ["unauthorized"]} + _changes = {} + if editor_id == user_id_to_update: + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email + + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name + + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if len(_changes.keys()) > 0: + update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": get_member(user_id=user_id_to_update, tenant_id=tenant_id)} + return {"data": user} + + def get_by_email_only(email): with pg_client.PostgresClient() as cur: cur.execute( @@ -342,11 +374,42 @@ def get_by_email_reset(email, reset_token): return helper.dict_to_camel_case(r) +def get_member(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify( + f"""SELECT + users.user_id, + users.email, + users.role, + users.name, + users.created_at, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, + DATE_PART('day',timezone('utc'::text, now()) \ + - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, + basic_authentication.password IS NOT NULL AS joined, + invitation_token + FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + WHERE users.deleted_at IS NULL AND users.user_id=%(user_id)s + ORDER BY name, user_id""", {"user_id": user_id}) + ) + u = helper.dict_to_camel_case(cur.fetchone()) + if u: + u["createdAt"] = 
TimeUTC.datetime_to_timestamp(u["createdAt"]) + if u["invitationToken"]: + u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) + else: + u["invitationLink"] = None + + return u + + def get_members(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( f"""SELECT - users.user_id AS id, + users.user_id, users.email, users.role, users.name, @@ -360,7 +423,7 @@ def get_members(tenant_id): invitation_token FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id WHERE users.deleted_at IS NULL - ORDER BY name, id""" + ORDER BY name, user_id""" ) r = cur.fetchall() if len(r): diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 2695a6b09..7bb02461a 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -140,7 +140,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, + return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 661194bbb..f533fa698 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -293,9 +293,8 @@ def generate_new_api_key(user_id): cur.mogrify( f"""UPDATE public.users SET api_key=generate_api_key(20) - WHERE - users.user_id = %(userId)s - AND deleted_at IS NULL + WHERE users.user_id = %(userId)s + AND deleted_at IS NULL RETURNING api_key;""", {"userId": user_id}) ) @@ -344,6 +343,47 @@ def edit(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, edito return {"data": user} +def edit_member(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id): + user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) + if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]: + admin = get(tenant_id=tenant_id, user_id=editor_id) + if not admin["superAdmin"] and not admin["admin"]: + return {"errors": ["unauthorized"]} + _changes = {} + if editor_id == user_id_to_update: + if changes.admin is not None: + if user["superAdmin"]: + changes.admin = None + elif changes.admin != user["admin"]: + return {"errors": ["cannot change your own role"]} + if changes.roleId is not None: + if user["superAdmin"]: + changes.roleId = None + elif changes.roleId != user["roleId"]: + return {"errors": ["cannot change your own role"]} + + if changes.email is not None and changes.email != user["email"]: + if email_exists(changes.email): + return {"errors": ["email already exists."]} + if get_deleted_user_by_email(changes.email) is not None: + return {"errors": ["email previously deleted."]} + _changes["email"] = changes.email + + if changes.name is not None and len(changes.name) > 0: + _changes["name"] = changes.name + + if changes.admin is not None: + _changes["role"] = "admin" if changes.admin else "member" + + if changes.roleId is not None: + _changes["roleId"] = changes.roleId + + if len(_changes.keys()) > 0: + update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes) + return {"data": get_member(tenant_id=tenant_id, user_id=user_id_to_update)} + return {"data": user} + + def get_by_email_only(email): with 
pg_client.PostgresClient() as cur: cur.execute( @@ -393,12 +433,49 @@ def get_by_email_reset(email, reset_token): return helper.dict_to_camel_case(r) +def get_member(tenant_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + f"""SELECT + users.user_id, + users.email, + users.role, + users.name, + users.created_at, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, + DATE_PART('day',timezone('utc'::text, now()) \ + - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation, + basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined, + invitation_token, + role_id, + roles.name AS role_name + FROM public.users + LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id + LEFT JOIN public.roles USING (role_id) + WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL AND users.user_id = %(user_id)s + ORDER BY name, user_id""", + {"tenant_id": tenant_id, "user_id": user_id}) + ) + u = helper.dict_to_camel_case(cur.fetchone()) + if u: + u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"]) + if u["invitationToken"]: + u["invitationLink"] = __get_invitation_link(u.pop("invitationToken")) + else: + u["invitationLink"] = None + + return u + + def get_members(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( f"""SELECT - users.user_id AS id, + users.user_id, users.email, users.role, users.name, @@ -416,7 +493,7 @@ def get_members(tenant_id): LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL - ORDER BY name, id""", + ORDER BY name, user_id""", {"tenant_id": tenant_id}) ) r = cur.fetchall() diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 176896ebb..b3dac897d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -144,8 +144,8 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = @app.post('/client/members/{memberId}', tags=["client"]) def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): - return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, - user_id_to_update=memberId) + return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, + user_id_to_update=memberId) @app.get('/metadata/session_search', tags=["metadata"]) From fdc9b1d3053d317f4943bde4d1ad7193316671bc Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 26 Sep 2022 15:27:22 +0200 Subject: [PATCH 079/592] feat(ui): draft note taking popup --- .../Session_/Player/Controls/TimeTooltip.tsx | 27 ---- .../Controls/components/NoteTooltip.tsx | 73 +++++++++ .../Controls/components/TimeTooltip.tsx | 38 +++++ .../Controls/components/TooltipContainer.tsx | 8 +- .../Controls/components/styles.module.css | 52 +++++++ frontend/app/components/Session_/Subheader.js | 141 ++++++++++-------- .../app/components/ui/Checkbox/Checkbox.tsx | 2 +- frontend/app/components/ui/SVG.tsx | 3 +- frontend/app/duck/sessions.js | 3 +- frontend/app/logger/index.js | 13 ++ .../messages/MFileReader.ts | 6 +- frontend/app/svg/icons/quotes.svg | 3 + 12 files changed, 269 insertions(+), 
100 deletions(-) delete mode 100644 frontend/app/components/Session_/Player/Controls/TimeTooltip.tsx create mode 100644 frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx create mode 100644 frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx create mode 100644 frontend/app/components/Session_/Player/Controls/components/styles.module.css create mode 100644 frontend/app/svg/icons/quotes.svg diff --git a/frontend/app/components/Session_/Player/Controls/TimeTooltip.tsx b/frontend/app/components/Session_/Player/Controls/TimeTooltip.tsx deleted file mode 100644 index fe22c4ea9..000000000 --- a/frontend/app/components/Session_/Player/Controls/TimeTooltip.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import React from 'react'; -// @ts-ignore -import { Duration } from 'luxon'; -import { connect } from 'react-redux'; -// @ts-ignore -import stl from './timeline.module.css'; - -function TimeTooltip({ time, offset, isVisible, liveTimeTravel }: { time: number; offset: number; isVisible: boolean, liveTimeTravel: boolean }) { - const duration = Duration.fromMillis(time).toFormat(`${liveTimeTravel ? '-' : ''}mm:ss`); - return ( -
    - {!time ? 'Loading' : duration} -
    - ); -} - -export default connect((state) => { - const { time = 0, offset = 0, isVisible } = state.getIn(['sessions', 'timeLineTooltip']); - return { time, offset, isVisible }; -})(TimeTooltip); diff --git a/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx b/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx new file mode 100644 index 000000000..ad3de3079 --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx @@ -0,0 +1,73 @@ +import React from 'react'; +import { Icon, Button, Checkbox } from 'UI'; +import { Duration } from 'luxon'; +import { connect } from 'react-redux'; +import stl from './styles.module.css'; + +interface Props { + offset: number; + isVisible: boolean; + time: number; +} + +const TAGS = ['QUERY', 'ISSUE', 'TASK', 'OTHER']; + +function NoteTooltip({ offset, isVisible, time }: Props) { + const duration = Duration.fromMillis(time).toFormat('mm:ss'); + + const stopEvents = (e: any) => { + e.stopPropagation(); + }; + + return ( +
    +
    + +

    Add Note

    +
    + + {`at ${duration}`} +
    + +
    + +
    +
    + +
    text field
    + +
    + {TAGS.map((tag) => ( +
    {tag}
    + ))} +
    + +
    + +
    + + + Visible to the team +
    +
    + +
    +
    + ); +} + +export default connect((state) => { + const { offset = 0, isVisible, time = 0 } = state.getIn(['sessions', 'noteTooltip']); + return { offset, isVisible, time }; +})(NoteTooltip); diff --git a/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx new file mode 100644 index 000000000..e1be98622 --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/components/TimeTooltip.tsx @@ -0,0 +1,38 @@ +import React from 'react'; +// @ts-ignore +import { Duration } from 'luxon'; +import { connect } from 'react-redux'; +import stl from './styles.module.css'; + +interface Props { + time: number; + offset: number; + isVisible: boolean; + liveTimeTravel: boolean; +} + +function TimeTooltip({ + time, + offset, + isVisible, + liveTimeTravel, +}: Props) { + const duration = Duration.fromMillis(time).toFormat(`${liveTimeTravel ? '-' : ''}mm:ss`); + return ( +
    + {!time ? 'Loading' : duration} +
    + ); +} + +export default connect((state) => { + const { time = 0, offset = 0, isVisible } = state.getIn(['sessions', 'timeLineTooltip']); + return { time, offset, isVisible }; +})(TimeTooltip); diff --git a/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx b/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx index 19396fbc6..8488fa848 100644 --- a/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx +++ b/frontend/app/components/Session_/Player/Controls/components/TooltipContainer.tsx @@ -1,5 +1,6 @@ import React from 'react' -import TimeTooltip from '../TimeTooltip'; +import TimeTooltip from './TimeTooltip'; +import NoteTooltip from './NoteTooltip'; import store from 'App/store'; import { Provider } from 'react-redux'; @@ -7,7 +8,10 @@ function TooltipContainer({ live }: { live: boolean }) { return ( - + <> + + + ) } diff --git a/frontend/app/components/Session_/Player/Controls/components/styles.module.css b/frontend/app/components/Session_/Player/Controls/components/styles.module.css new file mode 100644 index 000000000..bba5d3057 --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/components/styles.module.css @@ -0,0 +1,52 @@ + +.timeTooltip { + position: absolute; + padding: 0.25rem; + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; + background: black; + top: -35px; + color: white; + + &:after { + content:''; + position: absolute; + top: 100%; + left: 0; + right: 0; + margin: 0 auto; + width: 0; + height: 0; + border-top: solid 5px black; + border-left: solid 5px transparent; + border-right: solid 5px transparent; + } +} + +.noteTooltip { + position: absolute; + padding: 1rem; + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; + background: #F5F5F5; + top: -35px; + color: black; + border-radius: 12px; + cursor: default; + box-shadow: 0 4px 20px 4px rgb(0 20 60 / 10%), 0 4px 80px -8px rgb(0 20 60 / 20%); +} + +.arrow { + position: absolute; + top: 100%; + left: 0; + right: 0; + margin: 0 auto; + width: 0; + height: 0; + border-top: solid 10px #f5f5f5; + border-left: solid 10px transparent; + border-right: solid 10px transparent; +} diff --git a/frontend/app/components/Session_/Subheader.js b/frontend/app/components/Session_/Subheader.js index c378386ee..1f36382c7 100644 --- a/frontend/app/components/Session_/Subheader.js +++ b/frontend/app/components/Session_/Subheader.js @@ -1,80 +1,91 @@ import React from 'react'; import { Icon } from 'UI'; import Autoplay from './Autoplay'; -import Bookmark from 'Shared/Bookmark' +import Bookmark from 'Shared/Bookmark'; import SharePopup from '../shared/SharePopup/SharePopup'; -import { connectPlayer } from 'Player'; +import { connectPlayer, pause } from 'Player'; import copy from 'copy-to-clipboard'; import { Tooltip } from 'react-tippy'; import Issues from './Issues/Issues'; function SubHeader(props) { - const [isCopied, setCopied] = React.useState(false); + const [isCopied, setCopied] = React.useState(false); - const isAssist = window.location.pathname.includes('/assist/'); + const isAssist = window.location.pathname.includes('/assist/'); - const location = props.currentLocation && props.currentLocation.length > 60 ? `${props.currentLocation.slice(0, 60)}...` : props.currentLocation - return ( -
    - {location && ( -
    { - copy(props.currentLocation); - setCopied(true) - setTimeout(() => setCopied(false), 5000) - }} - > - - - {location} - -
    - )} - {!isAssist ? ( -
    -
    - {props.jiraConfig && props.jiraConfig.token && } -
    -
    - - - Share -
    - } - /> -
    -
    - -
    -
    - -
    -
    -
    -
    - ) : null} + const location = + props.currentLocation && props.currentLocation.length > 60 + ? `${props.currentLocation.slice(0, 60)}...` + : props.currentLocation; + + const toggleNotePopup = () => { + pause(); + }; + return ( +
    + {location && ( +
    { + copy(props.currentLocation); + setCopied(true); + setTimeout(() => setCopied(false), 5000); + }} + > + + + {location} +
    - ) + )} + {!isAssist ? ( +
    +
    + + Add note +
    +
    + {props.jiraConfig && props.jiraConfig.token && } +
    +
    + + + Share +
    + } + /> +
    +
    + +
    +
    + +
    +
    +
    + ) : null} +
    + ); } -const SubH = connectPlayer(state => ({ currentLocation: state.location }))(SubHeader) +const SubH = connectPlayer((state) => ({ currentLocation: state.location }))(SubHeader); -export default React.memo(SubH) +export default React.memo(SubH); diff --git a/frontend/app/components/ui/Checkbox/Checkbox.tsx b/frontend/app/components/ui/Checkbox/Checkbox.tsx index 2b68ccc97..ec401557c 100644 --- a/frontend/app/components/ui/Checkbox/Checkbox.tsx +++ b/frontend/app/components/ui/Checkbox/Checkbox.tsx @@ -2,7 +2,7 @@ import React from 'react'; import cn from 'classnames'; interface Props { - classNam?: string; + className?: string; label?: string; [x: string]: any; } diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index 23aa27e70..e0c59ef8c 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-plus' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 
'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 
'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-clockwise' | 'arrow-down' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-plus' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cubes' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/link' | 'event/location' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' 
| 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-ul' | 'list' | 'lock-alt' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus' | 'prev1' | 'puzzle-piece' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' 
| 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -331,6 +331,7 @@ const SVG = (props: Props) => { case 'question-lg': return ; case 'quote-left': return ; case 'quote-right': return ; + case 'quotes': return ; case 'redo-back': return ; case 'redo': return ; case 'remote-control': return ; diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index fe8927097..746b99e0e 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -63,7 +63,8 @@ const initialState = Map({ timelinePointer: null, sessionPath: {}, lastPlayedSessionId: null, - timeLineTooltip: { time: 0, offset: 0, isVisible: false } + timeLineTooltip: { time: 0, offset: 0, isVisible: false }, + noteTooltip: { time: 100, offset: 100, isVisible: true }, }); const reducer = (state = initialState, action = {}) => { diff --git a/frontend/app/logger/index.js b/frontend/app/logger/index.js index d119c139d..084d84a56 100644 --- a/frontend/app/logger/index.js +++ b/frontend/app/logger/index.js @@ -18,10 +18,23 @@ function error(...args) { } } +let groupTm = null; + + function group(...args) { + if (!window.env.PRODUCTION || options.verbose) { + if (!groupTm) { + groupTm = setTimeout(() => console.groupEnd(), 500) + console.groupCollapsed('Openreplay: Skipping session messages') + } + console.log(...args); + } +} + export default { info: log, log, warn, error, + group, } diff --git a/frontend/app/player/MessageDistributor/messages/MFileReader.ts b/frontend/app/player/MessageDistributor/messages/MFileReader.ts index 9db9c2cff..96ba11c36 100644 --- a/frontend/app/player/MessageDistributor/messages/MFileReader.ts +++ b/frontend/app/player/MessageDistributor/messages/MFileReader.ts @@ -4,7 +4,7 @@ import logger from 'App/logger'; import RawMessageReader from './RawMessageReader'; // TODO: composition instead of inheritance -// needSkipMessage() and next() methods here use buf and p protected properties, +// needSkipMessage() and next() methods here use buf and p protected properties, // which should be probably somehow incapsulated export default class MFileReader extends RawMessageReader { private pLastMessageID: number = 0 @@ -49,7 +49,7 @@ export default class MFileReader extends RawMessageReader { if (!skippedMessage) { return null } - logger.log("Skipping message: ", skippedMessage) + logger.group("Skipping message: ", skippedMessage) } this.pLastMessageID = this.p @@ -65,7 +65,7 @@ export default class MFileReader extends RawMessageReader { } this.currentTime = rMsg.timestamp - this.startTime return this.next() - } + } const msg = Object.assign(rMsg, { time: this.currentTime, diff --git a/frontend/app/svg/icons/quotes.svg b/frontend/app/svg/icons/quotes.svg new file mode 100644 index 000000000..252b3b50f --- /dev/null +++ b/frontend/app/svg/icons/quotes.svg @@ -0,0 +1,3 @@ + + + From d1a325c4139205eee0f962f80da08a4dc094172d Mon Sep 17 00:00:00 2001 From: sylenien Date: Tue, 27 Sep 2022 14:35:15 +0200 Subject: [PATCH 080/592] feat(ui): add notes list to default page --- .../SessionListContainer.tsx | 5 +- .../components/Notes/NoteItem.tsx | 46 +++++++++++ .../components/Notes/NoteList.tsx | 78 +++++++++++++++++++ .../components/Notes/index.ts | 1 + .../SessionHeader/SessionHeader.tsx | 44 +++++++---- 5 files changed, 158 insertions(+), 16 deletions(-) create mode 100644 frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx create mode 100644 
frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx create mode 100644 frontend/app/components/shared/SessionListContainer/components/Notes/index.ts diff --git a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx index 0b8bc35c6..41a033088 100644 --- a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx +++ b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx @@ -1,13 +1,16 @@ import React from 'react'; import SessionList from './components/SessionList'; import SessionHeader from './components/SessionHeader'; +import NotesList from './components/Notes/NoteList'; function SessionListContainer() { return (
    - + {/* */} + +
    ); } diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx new file mode 100644 index 000000000..e05cfac10 --- /dev/null +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx @@ -0,0 +1,46 @@ +import React from 'react' +import { Icon } from 'UI' +import PlayLink from 'Shared/SessionItem/PlayLink' + +enum Tags { + QUERY, + ISSUE, + TASK, + OTHER +} + +interface Props { + author: string + date: string + tag: Tags + isPrivate: boolean + description: string + sessionId: string +} + +function NoteItem(props: Props) { + + return ( +
    +
    +
    {props.description}
    +
    +
    {props.tag}
    +
    + By + {props.author}, {props.date} + {props.isPrivate ? null : ( + <> + Team + + )} +
    +
    +
    +
    +
    +
    + ) +} + +export default NoteItem diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx new file mode 100644 index 000000000..7695c968c --- /dev/null +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx @@ -0,0 +1,78 @@ +import React from 'react'; +import { NoContent, Pagination, Icon } from 'UI'; +import { sliceListPerPage } from 'App/utils'; +import NoteItem from './NoteItem'; + +//{ siteId }: { siteId: string } +function NotesList() { + const list = [ + { + author: 'nikita@openreplay.com', + date: 'Today, 12.00PM', + tag: 1, + isPrivate: true, + description: 'Testing private note stuff bla bla bla', + sessionId: '123123123', + id: 2, + }, + { + author: 'sasha@openreplay.com', + date: 'Tomorrow, 12.00PM', + tag: 0, + isPrivate: false, + description: 'Not Testing team note stuff bla bla bla', + sessionId: '123123123', + id: 1, + }, + ]; + + const store = { + page: 1, + pageSize: 10, + // @ts-ignore + updateKey: (a, b) => 1, + }; + + return ( + + +
    No notes yet
    +
    + } + > +
    + {sliceListPerPage(list, store.page - 1, store.pageSize).map((note) => ( + + + + ))} +
    + +
    +
    + Showing {Math.min(list.length, store.pageSize)} out + of {list.length} notes +
    + store.updateKey('page', page)} + limit={store.pageSize} + debounceRequest={100} + /> +
    + + ); +} + +export default NotesList; diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/index.ts b/frontend/app/components/shared/SessionListContainer/components/Notes/index.ts new file mode 100644 index 000000000..49327f792 --- /dev/null +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/index.ts @@ -0,0 +1 @@ +export { default } from './NoteList' diff --git a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx index e991c8c31..b22500419 100644 --- a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx @@ -10,10 +10,22 @@ import cn from 'classnames'; import { setActiveTab } from 'Duck/search'; import SessionSettingButton from '../SessionSettingButton'; +// @ts-ignore +const Tab = ({ addBorder, onClick, children }) => ( +
    + {children} +
    +) + interface Props { listCount: number; filter: any; - isBookmark: any; + activeTab: string; isEnterprise: boolean; applyFilter: (filter: any) => void; setActiveTab: (tab: any) => void; @@ -21,7 +33,7 @@ interface Props { function SessionHeader(props: Props) { const { filter: { startDate, endDate, rangeValue }, - isBookmark, + activeTab, isEnterprise, } = props; @@ -35,27 +47,29 @@ function SessionHeader(props: Props) { return (
    -
    -
    + props.setActiveTab({ type: 'all' })} + addBorder={activeTab === 'all'} > SESSIONS -
    -
    + props.setActiveTab({ type: 'bookmark' })} + addBorder={activeTab === 'bookmark'} > {`${isEnterprise ? 'VAULT' : 'BOOKMARKS'}`} -
    + + props.setActiveTab({ type: 'notes' })} + > + NOTES +
    - {!isBookmark &&
    + {activeTab === 'all' &&
    @@ -71,7 +85,7 @@ export default connect( (state: any) => ({ filter: state.getIn(['search', 'instance']), listCount: numberWithCommas(state.getIn(['sessions', 'total'])), - isBookmark: state.getIn(['search', 'activeTab', 'type']) === 'bookmark', + activeTab: state.getIn(['search', 'activeTab', 'type']), isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee', }), { applyFilter, setActiveTab } From 266a0bef7eec354cf2d361e0dbb405527b488b94 Mon Sep 17 00:00:00 2001 From: sylenien Date: Wed, 28 Sep 2022 11:08:26 +0200 Subject: [PATCH 081/592] feat(ui): add notes store --- .../SessionListContainer.tsx | 9 +- .../components/Notes/NoteItem.tsx | 8 +- .../components/Notes/NoteList.tsx | 63 +++++--------- frontend/app/mstore/index.tsx | 83 +++++++++++-------- frontend/app/mstore/notesStore.ts | 72 ++++++++++++++++ frontend/app/services/NotesService.ts | 54 ++++++++++++ frontend/app/services/index.ts | 8 +- 7 files changed, 209 insertions(+), 88 deletions(-) create mode 100644 frontend/app/mstore/notesStore.ts create mode 100644 frontend/app/services/NotesService.ts diff --git a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx index 41a033088..2e59e3b5e 100644 --- a/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx +++ b/frontend/app/components/shared/SessionListContainer/SessionListContainer.tsx @@ -2,17 +2,16 @@ import React from 'react'; import SessionList from './components/SessionList'; import SessionHeader from './components/SessionHeader'; import NotesList from './components/Notes/NoteList'; +import { connect } from 'react-redux' -function SessionListContainer() { +function SessionListContainer({ activeTab }: { activeTab: string }) { return (
    - {/* */} - - + {activeTab !== 'notes' ? : }
    ); } -export default SessionListContainer; +export default connect(state => ({ activeTab: state.getIn(['search', 'activeTab', 'type'])}))(SessionListContainer); diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx index e05cfac10..05a77b25d 100644 --- a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteItem.tsx @@ -11,8 +11,8 @@ enum Tags { interface Props { author: string - date: string - tag: Tags + timestamp: number + tags: string[] isPrivate: boolean description: string sessionId: string @@ -25,10 +25,10 @@ function NoteItem(props: Props) {
    {props.description}
    -
    {props.tag}
    +
    {props.tags}
    By - {props.author}, {props.date} + {props.author}, {props.timestamp} {props.isPrivate ? null : ( <> Team diff --git a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx index 7695c968c..a7a2cd215 100644 --- a/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/Notes/NoteList.tsx @@ -2,37 +2,20 @@ import React from 'react'; import { NoContent, Pagination, Icon } from 'UI'; import { sliceListPerPage } from 'App/utils'; import NoteItem from './NoteItem'; +import { observer } from 'mobx-react-lite'; +import { useStore } from 'App/mstore'; -//{ siteId }: { siteId: string } function NotesList() { - const list = [ - { - author: 'nikita@openreplay.com', - date: 'Today, 12.00PM', - tag: 1, - isPrivate: true, - description: 'Testing private note stuff bla bla bla', - sessionId: '123123123', - id: 2, - }, - { - author: 'sasha@openreplay.com', - date: 'Tomorrow, 12.00PM', - tag: 0, - isPrivate: false, - description: 'Not Testing team note stuff bla bla bla', - sessionId: '123123123', - id: 1, - }, - ]; + const { notesStore } = useStore() - const store = { - page: 1, - pageSize: 10, - // @ts-ignore - updateKey: (a, b) => 1, - }; + React.useEffect(() => { + if (!notesStore.notes.length) { + notesStore.fetchNotes() + } + }, []) + const list = notesStore.notes + console.log(list) return (
    - {sliceListPerPage(list, store.page - 1, store.pageSize).map((note) => ( - + {sliceListPerPage(list, notesStore.page - 1, notesStore.pageSize).map(note => ( + ))} @@ -60,14 +43,14 @@ function NotesList() {
    - Showing {Math.min(list.length, store.pageSize)} out + Showing {Math.min(list.length, notesStore.pageSize)} out of {list.length} notes
    store.updateKey('page', page)} - limit={store.pageSize} + page={notesStore.page} + totalPages={Math.ceil(list.length / notesStore.pageSize)} + onPageChange={(page) => notesStore.changePage(page)} + limit={notesStore.pageSize} debounceRequest={100} />
    @@ -75,4 +58,4 @@ function NotesList() { ); } -export default NotesList; +export default observer(NotesList); diff --git a/frontend/app/mstore/index.tsx b/frontend/app/mstore/index.tsx index 26533ee4f..bd04801eb 100644 --- a/frontend/app/mstore/index.tsx +++ b/frontend/app/mstore/index.tsx @@ -5,56 +5,67 @@ import UserStore from './userStore'; import RoleStore from './roleStore'; import APIClient from 'App/api_client'; import FunnelStore from './funnelStore'; -import { dashboardService, metricService, sessionService, userService, auditService, funnelService, errorService } from 'App/services'; +import { + dashboardService, + metricService, + sessionService, + userService, + auditService, + funnelService, + errorService, + notesService, +} from 'App/services'; import SettingsStore from './settingsStore'; import AuditStore from './auditStore'; import NotificationStore from './notificationStore'; import ErrorStore from './errorStore'; import SessionStore from './sessionStore'; +import NotesStore from './notesStore'; export class RootStore { - dashboardStore: DashboardStore; - metricStore: MetricStore; - funnelStore: FunnelStore; - settingsStore: SettingsStore; - userStore: UserStore; - roleStore: RoleStore; - auditStore: AuditStore; - errorStore: ErrorStore; - notificationStore: NotificationStore - sessionStore: SessionStore; + dashboardStore: DashboardStore; + metricStore: MetricStore; + funnelStore: FunnelStore; + settingsStore: SettingsStore; + userStore: UserStore; + roleStore: RoleStore; + auditStore: AuditStore; + errorStore: ErrorStore; + notificationStore: NotificationStore; + sessionStore: SessionStore; + notesStore: NotesStore; - constructor() { - this.dashboardStore = new DashboardStore(); - this.metricStore = new MetricStore(); - this.funnelStore = new FunnelStore(); - this.settingsStore = new SettingsStore(); - this.userStore = new UserStore(); - this.roleStore = new RoleStore(); - this.auditStore = new AuditStore(); - this.errorStore = new ErrorStore(); - this.notificationStore = new NotificationStore(); - this.sessionStore = new SessionStore(); - } + constructor() { + this.dashboardStore = new DashboardStore(); + this.metricStore = new MetricStore(); + this.funnelStore = new FunnelStore(); + this.settingsStore = new SettingsStore(); + this.userStore = new UserStore(); + this.roleStore = new RoleStore(); + this.auditStore = new AuditStore(); + this.errorStore = new ErrorStore(); + this.notificationStore = new NotificationStore(); + this.sessionStore = new SessionStore(); + this.notesStore = new NotesStore(); + } - initClient() { - const client = new APIClient(); - dashboardService.initClient(client) - metricService.initClient(client) - funnelService.initClient(client) - sessionService.initClient(client) - userService.initClient(client) - auditService.initClient(client) - errorService.initClient(client) - } + initClient() { + const client = new APIClient(); + dashboardService.initClient(client); + metricService.initClient(client); + funnelService.initClient(client); + sessionService.initClient(client); + userService.initClient(client); + auditService.initClient(client); + errorService.initClient(client); + notesService.initClient(client) + } } const StoreContext = React.createContext({} as RootStore); export const StoreProvider = ({ children, store }: any) => { - return ( - {children} - ); + return {children}; }; export const useStore = () => React.useContext(StoreContext); diff --git a/frontend/app/mstore/notesStore.ts b/frontend/app/mstore/notesStore.ts new file mode 100644 
index 000000000..a309c5ba6 --- /dev/null +++ b/frontend/app/mstore/notesStore.ts @@ -0,0 +1,72 @@ +import { makeAutoObservable } from "mobx" +import { notesService } from "App/services" +import { Note } from 'App/services/NotesService' + +interface SessionNotes { + [sessionId: string]: Note[] +} + +export default class NotesStore { + notes: Note[] = [] + sessionNotes: SessionNotes + loading: boolean + page = 1 + pageSize = 15 + + + constructor() { + makeAutoObservable(this) + } + + async fetchNotes() { + this.loading = true + try { + const notes = await notesService.getNotes() + this.notes = notes; + } catch (e) { + console.error(e) + } finally { + this.loading = false + } + } + + async fetchSessionNotes(sessionId: string) { + this.loading = true + try { + const notes = await notesService.getNotesBySessionId(sessionId) + this.sessionNotes[sessionId] = notes + } catch (e) { + console.error(e) + } finally { + this.loading = false + } + } + + async addNote(sessionId: string, note: Note) { + this.loading = true + try { + const addedNote = await notesService.addNote(sessionId, note) + return addedNote + } catch (e) { + console.error(e) + } finally { + this.loading = false + } + } + + async deleteNote(noteId: string) { + this.loading = true + try { + const deleted = await notesService.deleteNote(noteId) + return deleted + } catch (e) { + console.error(e) + } finally { + this.loading = false + } + } + + changePage(page: number) { + this.page = page + } +} diff --git a/frontend/app/services/NotesService.ts b/frontend/app/services/NotesService.ts new file mode 100644 index 000000000..075b99f5e --- /dev/null +++ b/frontend/app/services/NotesService.ts @@ -0,0 +1,54 @@ + + +import APIClient from 'App/api_client'; + +export interface Note { + message: string + tags: string[] + isPublic: boolean + timestamp: number + noteId?: string + author?: string +} + +export default class NotesService { + private client: APIClient; + + constructor(client?: APIClient) { + this.client = client ? 
client : new APIClient(); + } + + initClient(client?: APIClient) { + this.client = client || new APIClient(); + } + + getNotes(): Promise { + return this.client.get('/notes').then(r => { + if (r.ok) { + return r.json() + } else { + throw new Error('Error getting notes: ' + r.status) + } + }) + } + + getNotesBySessionId(sessionID: string): Promise { + return this.client.get(`/sessions/${sessionID}/notes`) + .then(r => r.json()) + } + + addNote(sessionID: string, note: Note): Promise { + return this.client.post(`/sessions/${sessionID}/notes`, note) + .then(r => r.json()) + } + + updateNote(noteID: string, note: Note): Promise { + return this.client.post(`/notes/${noteID}`, note) + .then(r => r.json()) + } + + deleteNote(noteID: string) { + return this.client.delete(`/notes/${noteID}`) + .then(r => r.json()) + } +} diff --git a/frontend/app/services/index.ts b/frontend/app/services/index.ts index add371643..6de2d300b 100644 --- a/frontend/app/services/index.ts +++ b/frontend/app/services/index.ts @@ -1,10 +1,11 @@ -import DashboardService, { IDashboardService } from "./DashboardService"; -import MetricService, { IMetricService } from "./MetricService"; -import FunnelService, { IFunnelService } from "./FunnelService"; +import DashboardService from "./DashboardService"; +import MetricService from "./MetricService"; +import FunnelService from "./FunnelService"; import SessionSerivce from "./SessionService"; import UserService from "./UserService"; import AuditService from './AuditService'; import ErrorService from "./ErrorService"; +import NotesService from "./NotesService"; export const dashboardService = new DashboardService(); export const metricService = new MetricService(); @@ -13,3 +14,4 @@ export const userService = new UserService(); export const funnelService = new FunnelService(); export const auditService = new AuditService(); export const errorService = new ErrorService(); +export const notesService = new NotesService(); From 65a4b1ca93c749a4199a634bce08eb797d3ea5e1 Mon Sep 17 00:00:00 2001 From: sylenien Date: Thu, 29 Sep 2022 15:09:39 +0200 Subject: [PATCH 082/592] feat(ui): fix adding/removing to the lists, add options popup, inject notes dynamically --- frontend/app/api_client.js | 7 +- .../Session/Layout/Player/TimeTracker.js | 7 +- frontend/app/components/Session/WebPlayer.js | 9 +- .../Session_/EventsBlock/EventGroupWrapper.js | 22 +++- .../Session_/EventsBlock/EventsBlock.js | 20 +++- .../Session_/EventsBlock/NoteEvent.tsx | 102 ++++++++++++++++ .../Session_/Player/Controls/Timeline.js | 29 +++-- .../Controls/components/NoteTooltip.tsx | 112 ++++++++++++++---- .../Controls/components/PlayerControls.tsx | 1 + frontend/app/components/Session_/Subheader.js | 18 +-- .../Session_/components/NotePopup.tsx | 34 ++++++ .../shared/SessionItem/PlayLink/PlayLink.tsx | 2 +- .../SessionListContainer.tsx | 40 +++++-- .../components/Notes/NoteItem.tsx | 106 ++++++++++++----- .../components/Notes/NoteList.tsx | 14 ++- .../app/components/ui/ItemMenu/ItemMenu.js | 3 +- frontend/app/duck/search.js | 2 + frontend/app/duck/sessions.js | 42 ++++++- frontend/app/mstore/notesStore.ts | 9 +- frontend/app/player/Player.ts | 11 ++ frontend/app/player/singletone.js | 2 + frontend/app/services/NotesService.ts | 67 +++++++++-- frontend/app/types/session/session.ts | 28 ++++- 23 files changed, 552 insertions(+), 135 deletions(-) create mode 100644 frontend/app/components/Session_/EventsBlock/NoteEvent.tsx create mode 100644 frontend/app/components/Session_/components/NotePopup.tsx diff --git 
a/frontend/app/api_client.js b/frontend/app/api_client.js index 33f7ffe66..0e4699359 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -26,6 +26,7 @@ const siteIdRequiredPaths = [ '/dashboards', '/metrics', '/unprocessed', + '/notes', // '/custom_metrics/sessions', ]; @@ -37,7 +38,7 @@ const noStoringFetchPathStarts = [ // null? export const clean = (obj, forbidenValues = [ undefined, '' ]) => { - const keys = Array.isArray(obj) + const keys = Array.isArray(obj) ? new Array(obj.length).fill().map((_, i) => i) : Object.keys(obj); const retObj = Array.isArray(obj) ? [] : {}; @@ -49,7 +50,7 @@ export const clean = (obj, forbidenValues = [ undefined, '' ]) => { retObj[key] = value; } }); - + return retObj; } @@ -70,7 +71,7 @@ export default class APIClient { this.siteId = siteId; } - fetch(path, params, options = { clean: true }) { + fetch(path, params, options = { clean: true }) { if (params !== undefined) { const cleanedParams = options.clean ? clean(params) : params; this.init.body = JSON.stringify(cleanedParams); diff --git a/frontend/app/components/Session/Layout/Player/TimeTracker.js b/frontend/app/components/Session/Layout/Player/TimeTracker.js index 731f414ac..d9927a921 100644 --- a/frontend/app/components/Session/Layout/Player/TimeTracker.js +++ b/frontend/app/components/Session/Layout/Player/TimeTracker.js @@ -1,22 +1,21 @@ import React from 'react'; import { observer } from 'mobx-react-lite'; -import { connectPlayer } from 'Player'; import cls from './timeTracker.module.css'; function TimeTracker({ player, scale }) { - return ( + return ( <>
    ); } -export default observer(TimeTracker); \ No newline at end of file +export default observer(TimeTracker); diff --git a/frontend/app/components/Session/WebPlayer.js b/frontend/app/components/Session/WebPlayer.js index b5b1f86af..6ef5bbba1 100644 --- a/frontend/app/components/Session/WebPlayer.js +++ b/frontend/app/components/Session/WebPlayer.js @@ -3,11 +3,11 @@ import { connect } from 'react-redux'; import { Loader } from 'UI'; import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player'; import { fetchList } from 'Duck/integrations'; -import { PlayerProvider, connectPlayer, init as initPlayer, clean as cleanPlayer, Controls } from 'Player'; +import { PlayerProvider, injectNotes, connectPlayer, init as initPlayer, clean as cleanPlayer, Controls } from 'Player'; import cn from 'classnames'; import RightBlock from './RightBlock'; import withLocationHandlers from 'HOCs/withLocationHandlers'; - +import { useStore } from 'App/mstore' import PlayerBlockHeader from '../Session_/PlayerBlockHeader'; import PlayerBlock from '../Session_/PlayerBlock'; import styles from '../Session_/session.module.css'; @@ -62,6 +62,7 @@ function RightMenu({ live, tabs, activeTab, setActiveTab, fullscreen }) { function WebPlayer(props) { const { session, toggleFullscreen, closeBottomBlock, live, fullscreen, jwt, fetchList } = props; + const { notesStore } = useStore() const [activeTab, setActiveTab] = useState(''); @@ -69,6 +70,10 @@ function WebPlayer(props) { fetchList('issues'); initPlayer(session, jwt); + notesStore.fetchSessionNotes(session.sessionId).then(r => { + injectNotes(r) + }) + const jumptTime = props.query.get('jumpto'); if (jumptTime) { Controls.jump(parseInt(jumptTime)); diff --git a/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js b/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js index c68dac880..49694ca5f 100644 --- a/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js +++ b/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js @@ -6,10 +6,11 @@ import withToggle from 'HOCs/withToggle'; import { TYPES } from 'Types/session/event'; import Event from './Event' import stl from './eventGroupWrapper.module.css'; +import NoteEvent from './NoteEvent'; // TODO: incapsulate toggler in LocationEvent @withToggle("showLoadInfo", "toggleLoadInfo") -class EventGroupWrapper extends React.PureComponent { +class EventGroupWrapper extends React.Component { toggleLoadInfo = (e) => { e.stopPropagation(); @@ -42,6 +43,8 @@ class EventGroupWrapper extends React.PureComponent { showLoadInfo, isFirst, presentInSearch, + isNote, + filterOutNote, } = this.props; const isLocation = event.type === TYPES.LOCATION; @@ -64,7 +67,19 @@ class EventGroupWrapper extends React.PureComponent {
         }
 
-      { isLocation
+      {isNote ? (
+        <NoteEvent … />
+      ) : isLocation
         ?
           …
-          }
+          />}
     )
   }
diff --git a/frontend/app/components/Session_/EventsBlock/EventsBlock.js b/frontend/app/components/Session_/EventsBlock/EventsBlock.js
index e690ce3cc..7fd9cc75e 100644
--- a/frontend/app/components/Session_/EventsBlock/EventsBlock.js
+++ b/frontend/app/components/Session_/EventsBlock/EventsBlock.js
@@ -5,7 +5,7 @@ import { Icon } from 'UI';
 import { List, AutoSizer, CellMeasurer, CellMeasurerCache } from "react-virtualized";
 import { TYPES } from 'Types/session/event';
 import { setSelected } from 'Duck/events';
-import { setEventFilter } from 'Duck/sessions';
+import { setEventFilter, filterOutNote } from 'Duck/sessions';
 import { show as showTargetDefiner } from 'Duck/components/targetDefiner';
 import EventGroupWrapper from './EventGroupWrapper';
 import styles from './eventsBlock.module.css';
@@ -21,7 +21,8 @@ import EventSearch from './EventSearch/EventSearch';
 }), {
   showTargetDefiner,
   setSelected,
-  setEventFilter
+  setEventFilter,
+  filterOutNote
 })
 export default class EventsBlock extends React.PureComponent {
   state = {
@@ -123,21 +124,28 @@ export default class EventsBlock extends React.PureComponent {
   onMouseOver = () => this.setState({ mouseOver: true })
   onMouseLeave = () => this.setState({ mouseOver: false })
 
+  get eventsList() {
+    const { session: { notesWithEvents }, filteredEvents } = this.props
+    const usedEvents = filteredEvents || notesWithEvents
+
+    return usedEvents
+  }
+
   renderGroup = ({ index, key, style, parent }) => {
     const {
-      session: { events },
       selectedEvents,
       currentTimeEventIndex,
       testsAvaliable,
       playing,
       eventsIndex,
-      filteredEvents
+      filterOutNote,
     } = this.props;
     const { query } = this.state;
-    const _events = filteredEvents || events;
+    const _events = this.eventsList
     const isLastEvent = index === _events.size - 1;
     const isLastInGroup = isLastEvent || _events.get(index + 1).type === TYPES.LOCATION;
     const event = _events.get(index);
+    const isNote = !!event.noteId
     const isSelected = selectedEvents.includes(event);
     const isCurrent = index === currentTimeEventIndex;
     const isEditing = this.state.editingEvent === event;
@@ -166,6 +174,8 @@ export default class EventsBlock extends React.PureComponent {
           isCurrent={ isCurrent }
           isEditing={ isEditing }
           showSelection={ testsAvaliable && !playing }
+          isNote={isNote}
+          filterOutNote={filterOutNote}
         />
       )}
diff --git a/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx b/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx
new file mode 100644
index 000000000..ae7a7245a
--- /dev/null
+++ b/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx
@@ -0,0 +1,102 @@
+import React from 'react';
+import { Icon } from 'UI';
+import { tagProps, iTag } from 'App/services/NotesService';
+import { formatTimeOrDate } from 'App/date';
+import { useStore } from 'App/mstore';
+import { observer } from 'mobx-react-lite';
+import { ItemMenu } from 'UI';
+import copy from 'copy-to-clipboard';
+import { toast } from 'react-toastify';
+import { session } from 'App/routes';
+import { confirm } from 'UI';
+import { filterOutNote as filterOutTimelineNote } from 'Player';
+
+interface Props {
+  userId: number;
+  timestamp: number;
+  tags: iTag[];
+  isPublic: boolean;
+  message: string;
+  sessionId: string;
+  date: string;
+  noteId: number;
+  filterOutNote: (id: number) => void;
+}
+
+function NoteEvent(props: Props) {
+  const { settingsStore, notesStore } = useStore();
+  const { timezone } = settingsStore.sessionSettings;
+
+  const onEdit = () => {};
+
+  const onCopy = () => {
+    copy(`${session(props.sessionId)}${props.timestamp > 0 ? '?jumpto=' + props.timestamp : ''}`);
+    toast.success('Note URL copied to clipboard');
+  };
+
+  const onDelete = async () => {
+    if (
+      await confirm({
+        header: 'Confirm',
+        confirmButton: 'Yes, delete',
+        confirmation: `Are you sure you want to delete this note?`,
+      })
+    ) {
+      notesStore.deleteNote(props.noteId).then((r) => {
+        props.filterOutNote(props.noteId);
+        filterOutTimelineNote(props.noteId)
+        toast.success('Note deleted');
+      });
+    }
+  };
+  const menuItems = [
+    { icon: 'pencil', text: 'Edit', onClick: onEdit },
+    { icon: 'link-45deg', text: 'Copy URL', onClick: onCopy },
+    { icon: 'trash', text: 'Delete', onClick: onDelete },
+  ];
+  return (
+    …
+    {props.userId}
+    {formatTimeOrDate(props.date as unknown as number, timezone)}
+    {props.message}
+    {props.tags.length ? (
+      {props.tags.map((tag) => (
+        {tag}
+      ))}
+    ) : null}
+    {!props.isPublic ? null : (
+      <>
+        Team
+      </>
+    )}
+  );
+}
+
+export default observer(NoteEvent);
diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.js b/frontend/app/components/Session_/Player/Controls/Timeline.js
index e53f567f0..8059629fd 100644
--- a/frontend/app/components/Session_/Player/Controls/Timeline.js
+++ b/frontend/app/components/Session_/Player/Controls/Timeline.js
@@ -1,5 +1,6 @@
 import React from 'react';
 import { connect } from 'react-redux';
+import { Icon } from 'UI'
 import { connectPlayer, Controls, toggleTimetravel } from 'Player';
 import TimeTracker from './TimeTracker';
 import stl from './timeline.module.css';
@@ -30,22 +31,12 @@ let debounceTooltipChange = () => null;
   disabled: state.cssLoading || state.messagesLoading || state.markedTargets,
   endTime: state.endTime,
   live: state.live,
-  logList: state.logList,
-  exceptionsList: state.exceptionsList,
-  resourceList: state.resourceList,
-  stackList: state.stackList,
-  fetchList: state.fetchList,
+  notes: state.notes,
 }))
 @connect(
   (state) => ({
     issues: state.getIn(['sessions', 'current', 'issues']),
     startedAt: state.getIn(['sessions', 'current', 'startedAt']),
-    clickRageTime:
-      state.getIn(['sessions', 'current', 'clickRage']) &&
-      state.getIn(['sessions', 'current', 'clickRageTime']),
-    returningLocationTime:
-      state.getIn(['sessions', 'current', 'returningLocation']) &&
-      state.getIn(['sessions', 'current', 'returningLocationTime']),
     tooltipVisible: state.getIn(['sessions', 'timeLineTooltip', 'isVisible']),
   }),
   { setTimelinePointer, setTimelineHoverTime }
@@ -170,7 +161,7 @@ export default class Timeline extends React.PureComponent {
   };
 
   render() {
-    const { events, skip, skipIntervals, disabled, endTime, live } = this.props;
+    const { events, skip, skipIntervals, disabled, endTime, live, notes } = this.props;
 
     const scale = 100 / endTime;
 
@@ -228,6 +219,20 @@ export default class Timeline extends React.PureComponent {
             style={{ left: `${getTimelinePosition(e.time, scale)}%` }}
           />
         ))}
+        {notes.map((note) => note.timestamp > 0 ? (
+          …
+        ) : null)}
     );
   }
diff --git a/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx b/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx
index ad3de3079..a694cc4b5 100644
--- a/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx
+++ b/frontend/app/components/Session_/Player/Controls/components/NoteTooltip.tsx
@@ -2,72 +2,138 @@ import React from 'react';
 import { Icon, Button, Checkbox } from 'UI';
 import { Duration } from 'luxon';
 import { connect } from 'react-redux';
+import { WriteNote, tagProps, TAGS, iTag, Note } from 'App/services/NotesService'
+import { setNoteTooltip, addNote } from 'Duck/sessions';
 import stl from './styles.module.css';
+import { useStore } from 'App/mstore'
+import { toast } from 'react-toastify';
+import { injectNotes } from 'Player';
 
 interface Props {
-  offset: number;
   isVisible: boolean;
   time: number;
+  setNoteTooltip: (state: any) => void
+  addNote: (note: Note) => void
+  sessionId: string
 }
 
-const TAGS = ['QUERY', 'ISSUE', 'TASK', 'OTHER'];
+function NoteTooltip({ isVisible, time, setNoteTooltip, sessionId, addNote }: Props) {
+  const [text, setText] = React.useState('')
+  const [isPublic, setPublic] = React.useState(false)
+  const [tags, setTags] = React.useState([])
+  const [useTimestamp, setUseTs] = React.useState(false)
+
+  const { notesStore } = useStore();
 
-function NoteTooltip({ offset, isVisible, time }: Props) {
   const duration = Duration.fromMillis(time).toFormat('mm:ss');
-
   const stopEvents = (e: any) => {
     e.stopPropagation();
   };
 
+  const onSubmit = () => {
+    const note: WriteNote = {
+      message: text,
+      tags,
+      timestamp: useTimestamp ? time : -1,
+      isPublic,
+    }
+
+    notesStore.addNote(sessionId, note).then(r => {
+      toast.success('Note added')
+      notesStore.fetchSessionNotes(sessionId).then(notes => {
+        injectNotes(notes)
+        addNote(r)
+      })
+    }).catch(e => {
+      toast.error('Error adding note')
+      console.error(e)
+    }).finally(() => {
+      setNoteTooltip({ isVisible: false, time: 0 })
+      setText('')
+      setTags([])
+    })
+  }
+
+  const closeTooltip = () => {
+    setNoteTooltip({ isVisible: false, time: 0 })
+  }
+
+  const tagActive = (tag: iTag) => tags.includes(tag)
+  const removeTag = (tag: iTag) => {
+    setTags(tags.filter(t => t !== tag))
+  }
+  const addTag = (tag: iTag) => {
+    setTags([...tags, tag])
+  }
 
   return (
[Remainder of the NoteTooltip hunk: the JSX markup was lost in this excerpt; the surviving fragments are an "Add Note" heading, a `setUseTs(!useTimestamp)` toggle labelled {`at ${duration}`}, and a "text field" placeholder.]