From b0dda1de6628470a917cb653d8eb3eb829efef4b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 8 Mar 2022 17:14:20 +0100 Subject: [PATCH 01/60] feature(ui) - pagination wip --- .../LiveSessionList/LiveSessionList.tsx | 16 +++++++-- .../components/ui/Pagination/Pagination.tsx | 35 +++++++++++++++++++ .../app/components/ui/Pagination/index.ts | 1 + frontend/app/components/ui/index.js | 1 + frontend/app/svg/icons/chevron-left.svg | 3 ++ frontend/app/svg/icons/chevron-right.svg | 3 ++ 6 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 frontend/app/components/ui/Pagination/Pagination.tsx create mode 100644 frontend/app/components/ui/Pagination/index.ts create mode 100644 frontend/app/svg/icons/chevron-left.svg create mode 100644 frontend/app/svg/icons/chevron-right.svg diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index cb503a745..7d2490149 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -1,7 +1,7 @@ import React, { useEffect } from 'react'; import { fetchLiveList } from 'Duck/sessions'; import { connect } from 'react-redux'; -import { NoContent, Loader, LoadMoreButton } from 'UI'; +import { NoContent, Loader, LoadMoreButton, Pagination } from 'UI'; import { List, Map } from 'immutable'; import SessionItem from 'Shared/SessionItem'; import withPermissions from 'HOCs/withPermissions' @@ -135,6 +135,13 @@ function LiveSessionList(props: Props) { props.updateSort({ order: state })} sortOrder={sort.order} /> +
+ null}
+ />
+
))} - */} + null} /> diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx new file mode 100644 index 000000000..f61457bc5 --- /dev/null +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -0,0 +1,35 @@ +import React from 'react' +import { Icon } from 'UI' +import cn from 'classnames' + +interface Props { + page: number + totalPages: number + onPageChange: (page: number) => void + limit?: number +} +export default function Pagination(props: Props) { + const { page, totalPages, onPageChange, limit = 5 } = props; + return ( +
+ + Page + + of + {totalPages} + +
+ ) +} diff --git a/frontend/app/components/ui/Pagination/index.ts b/frontend/app/components/ui/Pagination/index.ts new file mode 100644 index 000000000..29c341d81 --- /dev/null +++ b/frontend/app/components/ui/Pagination/index.ts @@ -0,0 +1 @@ +export { default } from './Pagination'; \ No newline at end of file diff --git a/frontend/app/components/ui/index.js b/frontend/app/components/ui/index.js index 1e0088720..1152437cf 100644 --- a/frontend/app/components/ui/index.js +++ b/frontend/app/components/ui/index.js @@ -55,5 +55,6 @@ export { default as HighlightCode } from './HighlightCode'; export { default as NoPermission } from './NoPermission'; export { default as NoSessionPermission } from './NoSessionPermission'; export { default as HelpText } from './HelpText'; +export { default as Pagination } from './Pagination'; export { Input, Modal, Form, Message, Card } from 'semantic-ui-react'; diff --git a/frontend/app/svg/icons/chevron-left.svg b/frontend/app/svg/icons/chevron-left.svg new file mode 100644 index 000000000..018f8b673 --- /dev/null +++ b/frontend/app/svg/icons/chevron-left.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/chevron-right.svg b/frontend/app/svg/icons/chevron-right.svg new file mode 100644 index 000000000..d621289b3 --- /dev/null +++ b/frontend/app/svg/icons/chevron-right.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file From 3537b6c00f7a084c350426cc2332db31ed4d2478 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 9 Mar 2022 15:58:20 +0100 Subject: [PATCH 02/60] feat(ui) - pagination wip --- .../LiveSessionList/LiveSessionList.tsx | 23 ++++++++------ .../components/ui/Pagination/Pagination.tsx | 30 ++++++++++++++----- .../app/svg/icons/chevron-double-left.svg | 2 +- frontend/app/svg/icons/chevron-left.svg | 2 +- frontend/app/svg/icons/chevron-right.svg | 2 +- 5 files changed, 40 insertions(+), 19 deletions(-) diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index 7d2490149..c2f3c1b3b 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -42,9 +42,8 @@ function LiveSessionList(props: Props) { text: capitalize(i), value: i })).toJS(); - const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size); - - const addPage = () => props.updateCurrentPage(props.currentPage + 1) + // const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size); + // const addPage = () => props.updateCurrentPage(props.currentPage + 1) useEffect(() => { if (filters.size === 0) { @@ -108,6 +107,12 @@ function LiveSessionList(props: Props) { }, AUTOREFRESH_INTERVAL); } + const sliceListPerPage = (list, page) => { + const start = page * PER_PAGE; + const end = start + PER_PAGE; + return list.slice(start, end); + } + return (
@@ -138,8 +143,8 @@ function LiveSessionList(props: Props) {
null}
+ totalPages={Math.ceil(sessions.size / PER_PAGE)}
+ onPageChange={(page) => props.updateCurrentPage(page)}
 />
- {sessions && sessions.sortBy(i => i.metadata[sort.field]).update(list => { + {sessions && sliceListPerPage(sessions.sortBy(i => i.metadata[sort.field]).update(list => { return sort.order === 'desc' ? list.reverse() : list; - }).take(displayedCount).map(session => ( + }), currentPage - 1).map(session => ( */} - null} - /> + /> */}
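
[Review note] The hunk above replaces the incremental `take(displayedCount)` window with page-based slicing. A minimal sketch of the `sliceListPerPage` contract it relies on, using a plain array in place of the Immutable.js `List` the component actually passes (the generic signature and the `PER_PAGE` value are assumptions; the body mirrors the helper defined earlier in this patch):

```typescript
const PER_PAGE = 10; // 10 as of patch 08; the exact constant varies across these patches

// Mirrors the component-local helper from this patch: 0-based page -> slice window.
function sliceListPerPage<T>(list: T[], page: number): T[] {
  const start = page * PER_PAGE; // page 0 -> items 0..9, page 1 -> 10..19
  const end = start + PER_PAGE;
  return list.slice(start, end);
}

// The Pagination component reports 1-based pages, hence `currentPage - 1`
// at the call site above.
const currentPage = 2;
const visible = sliceListPerPage(
  Array.from({ length: 25 }, (_, i) => i),
  currentPage - 1,
); // -> [10, 11, ..., 19]
```

Patch 03 extracts this helper into `App/utils` with a `perPage` parameter; note that `SessionList` there calls it with the 1-based `currentPage` directly, so the two call sites disagree on the page origin.
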
diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx index f61457bc5..11ae24c53 100644 --- a/frontend/app/components/ui/Pagination/Pagination.tsx +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -10,25 +10,41 @@ interface Props { } export default function Pagination(props: Props) { const { page, totalPages, onPageChange, limit = 5 } = props; + const [currentPage, setCurrentPage] = React.useState(page); + // const [] + + const changePage = (page: number) => { + if (page > 0 && page <= totalPages) { + onPageChange(page); + setCurrentPage(page); + } + } return (
Page - + changePage(parseInt(e.target.value))} + /> of {totalPages}
) diff --git a/frontend/app/svg/icons/chevron-double-left.svg b/frontend/app/svg/icons/chevron-double-left.svg index 7181fd111..8f30320c6 100644 --- a/frontend/app/svg/icons/chevron-double-left.svg +++ b/frontend/app/svg/icons/chevron-double-left.svg @@ -1,4 +1,4 @@ - + \ No newline at end of file diff --git a/frontend/app/svg/icons/chevron-left.svg b/frontend/app/svg/icons/chevron-left.svg index 018f8b673..919d877d2 100644 --- a/frontend/app/svg/icons/chevron-left.svg +++ b/frontend/app/svg/icons/chevron-left.svg @@ -1,3 +1,3 @@ - + \ No newline at end of file diff --git a/frontend/app/svg/icons/chevron-right.svg b/frontend/app/svg/icons/chevron-right.svg index d621289b3..67cb89d1a 100644 --- a/frontend/app/svg/icons/chevron-right.svg +++ b/frontend/app/svg/icons/chevron-right.svg @@ -1,3 +1,3 @@ - + \ No newline at end of file From ce33f1deb62244ea1f73815cb0c80a1d6123147c Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 9 Mar 2022 19:07:47 +0100 Subject: [PATCH 03/60] feat(ui) - pagination wip --- .../BugFinder/SessionList/SessionList.js | 51 +++++++++++-------- .../LiveSessionList/LiveSessionList.tsx | 34 ++++--------- frontend/app/duck/search.js | 13 +++++ frontend/app/duck/sessions.js | 2 + frontend/app/utils.js | 6 +++ 5 files changed, 61 insertions(+), 45 deletions(-) diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index 10db59c5b..e44610e1b 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -1,13 +1,14 @@ import { connect } from 'react-redux'; -import { Loader, NoContent, Button, LoadMoreButton } from 'UI'; +import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI'; import { applyFilter, addAttribute, addEvent } from 'Duck/filters'; -import { fetchSessions, addFilterByKeyAndValue } from 'Duck/search'; +import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search'; import SessionItem from 'Shared/SessionItem'; import SessionListHeader from './SessionListHeader'; import { FilterKey } from 'Types/filter/filterType'; +import { sliceListPerPage } from 'App/utils'; const ALL = 'all'; -const PER_PAGE = 10; +const PER_PAGE = 5; const AUTOREFRESH_INTERVAL = 3 * 60 * 1000; var timeoutId; @@ -20,12 +21,14 @@ var timeoutId; total: state.getIn([ 'sessions', 'total' ]), filters: state.getIn([ 'search', 'instance', 'filters' ]), metaList: state.getIn(['customFields', 'list']).map(i => i.key), + currentPage: state.getIn([ 'search', 'currentPage' ]), }), { applyFilter, addAttribute, addEvent, fetchSessions, addFilterByKeyAndValue, + updateCurrentPage, }) export default class SessionList extends React.PureComponent { state = { @@ -76,6 +79,8 @@ export default class SessionList extends React.PureComponent { clearTimeout(timeoutId) } + + renderActiveTabContent(list) { const { loading, @@ -84,6 +89,7 @@ export default class SessionList extends React.PureComponent { allList, activeTab, metaList, + currentPage, } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); @@ -93,28 +99,28 @@ export default class SessionList extends React.PureComponent { return ( -
Please try changing your search parameters.
- {allList.size > 0 && ( -
- However, we found other sessions based on your search parameters. -
- +
+
Please try changing your search parameters.
+ {allList.size > 0 && ( +
+ However, we found other sessions based on your search parameters. +
+ +
-
- )} -
+ )} +
} > - { list.take(displayedCount).map(session => ( + { sliceListPerPage(list, currentPage, PER_PAGE).map(session => ( ))} - this.props.updateCurrentPage(page)} + /> + {/* Try being a bit more specific by setting a specific time frame or simply use different filters
} - /> + /> */} ); } diff --git a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx index c2f3c1b3b..16aa8772c 100644 --- a/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx +++ b/frontend/app/components/shared/LiveSessionList/LiveSessionList.tsx @@ -12,7 +12,7 @@ import { addFilterByKeyAndValue, updateCurrentPage, updateSort } from 'Duck/live import DropdownPlain from 'Shared/DropdownPlain'; import SortOrderButton from 'Shared/SortOrderButton'; import { TimezoneDropdown } from 'UI'; -import { capitalize } from 'App/utils'; +import { capitalize, sliceListPerPage } from 'App/utils'; import LiveSessionReloadButton from 'Shared/LiveSessionReloadButton'; const AUTOREFRESH_INTERVAL = .5 * 60 * 1000 @@ -107,12 +107,6 @@ function LiveSessionList(props: Props) { }, AUTOREFRESH_INTERVAL); } - const sliceListPerPage = (list, page) => { - const start = page * PER_PAGE; - const end = start + PER_PAGE; - return list.slice(start, end); - } - return (
@@ -140,13 +134,7 @@ function LiveSessionList(props: Props) { props.updateSort({ order: state })} sortOrder={sort.order} />
-
- props.updateCurrentPage(page)} - /> -
+ ))} - {/* */} - {/* null} - /> */} +
+ props.updateCurrentPage(page)} + /> +
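
[Review note] The `duck/search.js` hunks that follow wire this page state into the fetch cycle: `UPDATE_CURRENT_PAGE` stores the page, and `reduceThenFetchResource` re-reads it from the store when building the request. A condensed sketch of that reduce-then-fetch pattern under redux-thunk, with plain-object state standing in for the app's Immutable.js store (`fetchSessionsApi` is an invented stand-in, not part of the patch):

```typescript
// Invented stand-in for the real fetch action creator.
declare function fetchSessionsApi(filter: unknown): { type: string };

const UPDATE_CURRENT_PAGE = 'search/UPDATE_CURRENT_PAGE';

const updateCurrentPage = (page: number) => ({ type: UPDATE_CURRENT_PAGE, page });

const reduceThenFetchResource =
  (actionCreator: (...args: any[]) => { type: string }) =>
  (...args: any[]) =>
  (dispatch: any, getState: () => any) => {
    // Reduce first, so the store already holds the new page...
    dispatch(actionCreator(...args));
    // ...then build the request from the updated state.
    const filter = { ...getState().search.instance };
    filter.limit = 5; // as in the hunk below; raised to 10 in patch 08
    filter.page = getState().search.currentPage;
    return dispatch(fetchSessionsApi(filter));
  };
```

Wrapping an action creator this way lets a single dispatch both update `currentPage` and trigger a server-side refetch with the matching `page`/`limit`.
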
diff --git a/frontend/app/duck/search.js b/frontend/app/duck/search.js index 3d15ae950..15011dafb 100644 --- a/frontend/app/duck/search.js +++ b/frontend/app/duck/search.js @@ -28,6 +28,7 @@ const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`; const UPDATE = `${name}/UPDATE`; const APPLY = `${name}/APPLY`; const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`; +const UPDATE_CURRENT_PAGE = `${name}/UPDATE_CURRENT_PAGE`; const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS'; @@ -49,6 +50,7 @@ const initialState = Map({ instance: new Filter({ filters: [] }), savedSearch: new SavedFilter({}), filterSearchList: {}, + currentPage: 1, }); // Metric - Series - [] - filters @@ -83,6 +85,8 @@ function reducer(state = initialState, action = {}) { return state.set('savedSearch', action.filter); case EDIT_SAVED_SEARCH: return state.mergeIn([ 'savedSearch' ], action.instance); + case UPDATE_CURRENT_PAGE: + return state.set('currentPage', action.page); } return state; } @@ -122,6 +126,8 @@ const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getStat dispatch(actionCreator(...args)); const filter = getState().getIn([ 'search', 'instance']).toData(); filter.filters = filter.filters.map(filterMap); + filter.limit = 5; + filter.page = getState().getIn([ 'search', 'currentPage']); return isRoute(ERRORS_ROUTE, window.location.pathname) ? dispatch(fetchErrorsList(filter)) @@ -268,4 +274,11 @@ export const refreshFilterOptions = () => { return { type: REFRESH_FILTER_OPTIONS } +} + +export function updateCurrentPage(page) { + return { + type: UPDATE_CURRENT_PAGE, + page, + }; } \ No newline at end of file diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index f3df333c7..7eedce57a 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -60,6 +60,7 @@ const initialState = Map({ funnelPage: Map(), timelinePointer: null, sessionPath: '', + total: 0, }); const reducer = (state = initialState, action = {}) => { @@ -129,6 +130,7 @@ const reducer = (state = initialState, action = {}) => { .set('favoriteList', list.filter(({ favorite }) => favorite)) .set('total', total) .set('keyMap', keyMap) + .set('total', total) .set('wdTypeCount', wdTypeCount); case SET_AUTOPLAY_VALUES: { const sessionIds = state.get('sessionIds') diff --git a/frontend/app/utils.js b/frontend/app/utils.js index ca7c19b4f..5ea05633c 100644 --- a/frontend/app/utils.js +++ b/frontend/app/utils.js @@ -232,4 +232,10 @@ export const isGreaterOrEqualVersion = (version, compareTo) => { const [major, minor, patch] = version.split("-")[0].split('.'); const [majorC, minorC, patchC] = compareTo.split("-")[0].split('.'); return (major > majorC) || (major === majorC && minor > minorC) || (major === majorC && minor === minorC && patch >= patchC); +} + +export const sliceListPerPage = (list, page, perPage = 10) => { + const start = page * perPage; + const end = start + perPage; + return list.slice(start, end); } \ No newline at end of file From 09aca1e61d0d91c41c93502b843999cb7774d593 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 10 Mar 2022 17:26:07 +0100 Subject: [PATCH 04/60] feat(ui) - pagination wip --- .../app/components/BugFinder/BugFinder.js | 10 ++-- .../BugFinder/SessionList/SessionList.js | 18 ++++--- .../SessionList/SessionListHeader.js | 2 +- .../BugFinder/SessionsMenu/SessionsMenu.js | 50 +++---------------- .../components/ui/Pagination/Pagination.tsx | 9 +++- frontend/app/constants/filterOptions.js | 26 +++++----- frontend/app/duck/search.js | 37 ++++++++++---- 
frontend/app/duck/sessions.js | 2 - frontend/app/styles/colors-autogen.css | 32 +++++++++++- frontend/app/types/filter/filterType.ts | 15 ++++++ frontend/app/utils.js | 1 + frontend/scripts/colors.js | 4 +- 12 files changed, 123 insertions(+), 83 deletions(-) diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js index 6d30359f1..326a1e78e 100644 --- a/frontend/app/components/BugFinder/BugFinder.js +++ b/frontend/app/components/BugFinder/BugFinder.js @@ -13,7 +13,8 @@ import withLocationHandlers from "HOCs/withLocationHandlers"; import { fetch as fetchFilterVariables } from 'Duck/sources'; import { fetchSources } from 'Duck/customField'; import { RehydrateSlidePanel } from './WatchDogs/components'; -import { setActiveTab, setFunnelPage } from 'Duck/sessions'; +import { setFunnelPage } from 'Duck/sessions'; +import { setActiveTab } from 'Duck/search'; import SessionsMenu from './SessionsMenu/SessionsMenu'; import { LAST_7_DAYS } from 'Types/app/period'; import { resetFunnel } from 'Duck/funnels'; @@ -51,12 +52,12 @@ const allowedQueryKeys = [ variables: state.getIn([ 'customFields', 'list' ]), sources: state.getIn([ 'customFields', 'sources' ]), filterValues: state.get('filterValues'), - activeTab: state.getIn([ 'sessions', 'activeTab' ]), favoriteList: state.getIn([ 'sessions', 'favoriteList' ]), currentProjectId: state.getIn([ 'user', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), watchdogs: state.getIn(['watchdogs', 'list']), activeFlow: state.getIn([ 'filters', 'activeFlow' ]), + sessions: state.getIn([ 'sessions', 'list' ]), }), { fetchFavoriteSessionList, applyFilter, @@ -91,7 +92,9 @@ export default class BugFinder extends React.PureComponent { // keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS() // }; // }); - props.fetchSessions(); + if (props.sessions.size === 0) { + props.fetchSessions(); + } props.resetFunnel(); props.resetFunnelFilters(); props.fetchFunnelsList(LAST_7_DAYS) @@ -115,7 +118,6 @@ export default class BugFinder extends React.PureComponent { } render() { - const { activeFlow, activeTab } = this.props; const { showRehydratePanel } = this.state; return ( diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index e44610e1b..a58d483ae 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -5,7 +5,6 @@ import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/s import SessionItem from 'Shared/SessionItem'; import SessionListHeader from './SessionListHeader'; import { FilterKey } from 'Types/filter/filterType'; -import { sliceListPerPage } from 'App/utils'; const ALL = 'all'; const PER_PAGE = 5; @@ -16,7 +15,7 @@ var timeoutId; shouldAutorefresh: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 0, savedFilters: state.getIn([ 'filters', 'list' ]), loading: state.getIn([ 'sessions', 'loading' ]), - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), allList: state.getIn([ 'sessions', 'list' ]), total: state.getIn([ 'sessions', 'total' ]), filters: state.getIn([ 'search', 'instance', 'filters' ]), @@ -90,6 +89,7 @@ export default class SessionList extends React.PureComponent { activeTab, metaList, currentPage, + 
total, } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); @@ -120,7 +120,7 @@ export default class SessionList extends React.PureComponent { } > - { sliceListPerPage(list, currentPage, PER_PAGE).map(session => ( + { list.map(session => ( ))} - this.props.updateCurrentPage(page)} - /> +
+ this.props.updateCurrentPage(page)} + /> +
{/* ({ - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), }), { applyFilter })(SessionListHeader); diff --git a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js index be98a28cd..fa0594316 100644 --- a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js +++ b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js @@ -1,30 +1,20 @@ -import React, { useEffect } from 'react' +import React from 'react' import { connect } from 'react-redux'; import cn from 'classnames'; -import { SideMenuitem, SavedSearchList, Progress, Popup, Icon, CircularLoader } from 'UI' +import { SideMenuitem, SavedSearchList, Progress, Popup } from 'UI' import stl from './sessionMenu.css'; import { fetchWatchdogStatus } from 'Duck/watchdogs'; -import { setActiveFlow, clearEvents } from 'Duck/filters'; -import { setActiveTab } from 'Duck/sessions'; +import { clearEvents } from 'Duck/filters'; import { issues_types } from 'Types/session/issue' import { fetchList as fetchSessionList } from 'Duck/sessions'; function SessionsMenu(props) { - const { - activeFlow, activeTab, watchdogs = [], keyMap, wdTypeCount, - fetchWatchdogStatus, toggleRehydratePanel, filters, sessionsLoading } = props; + const { activeTab, keyMap, wdTypeCount, toggleRehydratePanel } = props; const onMenuItemClick = (filter) => { props.onMenuItemClick(filter) - - if (activeFlow && activeFlow.type === 'flows') { - props.setActiveFlow(null) - } } - - // useEffect(() => { - // fetchWatchdogStatus() - // }, []) + const capturingAll = props.captureRate && props.captureRate.get('captureAll'); @@ -66,36 +56,13 @@ function SessionsMenu(props) { { issues_types.filter(item => item.visible).map(item => ( onMenuItemClick(item)} /> ))} - {/*
-
- -
Assist
- { activeTab.type === 'live' && ( -
!sessionsLoading && props.fetchSessionList(filters.toJS())} - > - { sessionsLoading ? : } -
- )} -
- } - iconName="person" - active={activeTab.type === 'live'} - onClick={() => onMenuItemClick({ name: 'Assist', type: 'live' })} - /> - -
*/} -
({ - activeTab: state.getIn([ 'sessions', 'activeTab' ]), + activeTab: state.getIn([ 'search', 'activeTab' ]), keyMap: state.getIn([ 'sessions', 'keyMap' ]), wdTypeCount: state.getIn([ 'sessions', 'wdTypeCount' ]), - activeFlow: state.getIn([ 'filters', 'activeFlow' ]), captureRate: state.getIn(['watchdogs', 'captureRate']), filters: state.getIn([ 'filters', 'appliedFilter' ]), sessionsLoading: state.getIn([ 'sessions', 'fetchLiveListRequest', 'loading' ]), }), { - fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab, fetchSessionList + fetchWatchdogStatus, clearEvents, fetchSessionList })(SessionsMenu); diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx index 11ae24c53..5e1b422d3 100644 --- a/frontend/app/components/ui/Pagination/Pagination.tsx +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -10,8 +10,12 @@ interface Props { } export default function Pagination(props: Props) { const { page, totalPages, onPageChange, limit = 5 } = props; + const [currentPage, setCurrentPage] = React.useState(page); - // const [] + React.useMemo( + () => setCurrentPage(page), + [page], + ); const changePage = (page: number) => { if (page > 0 && page <= totalPages) { @@ -19,6 +23,7 @@ export default function Pagination(props: Props) { setCurrentPage(page); } } + return (
{/* void - limit?: number + limit?: number } export default function Pagination(props: Props) { const { page, totalPages, onPageChange, limit = 5 } = props; - const [currentPage, setCurrentPage] = React.useState(page); React.useMemo( () => setCurrentPage(page), [page], ); + const debounceChange = React.useCallback(debounce(onPageChange, 1000), []); + const changePage = (page: number) => { if (page > 0 && page <= totalPages) { - onPageChange(page); setCurrentPage(page); + debounceChange(page); } } - + + const isFirstPage = currentPage === 1; + const isLastPage = currentPage === totalPages; return (
Page of {totalPages}
) From 45857a804b1197e60236640c6b1601df91bacb4b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 10 Mar 2022 18:15:27 +0100 Subject: [PATCH 07/60] feat(ui) - pagination tooltips :) --- .../components/ui/Pagination/Pagination.tsx | 41 +++++++++++++------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx index 4a8e3f1d5..851b35161 100644 --- a/frontend/app/components/ui/Pagination/Pagination.tsx +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -2,6 +2,7 @@ import React from 'react' import { Icon } from 'UI' import cn from 'classnames' import { debounce } from 'App/utils'; +import { Tooltip } from 'react-tippy'; interface Props { page: number totalPages: number @@ -29,13 +30,21 @@ export default function Pagination(props: Props) { const isLastPage = currentPage === totalPages; return (
- + + Page of {totalPages} - + +
) } From 7e29cde06fbf9228bae69d910efdedaa9ef8b681 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 10 Mar 2022 18:22:19 +0100 Subject: [PATCH 08/60] feat(ui) - pagination limit set --- frontend/app/components/BugFinder/SessionList/SessionList.js | 3 ++- .../components/shared/LiveSessionList/LiveSessionList.tsx | 3 ++- frontend/app/components/ui/Pagination/Pagination.tsx | 5 +++-- frontend/app/duck/search.js | 2 +- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index 5a76dcffa..858e9cb30 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -7,7 +7,7 @@ import SessionListHeader from './SessionListHeader'; import { FilterKey } from 'Types/filter/filterType'; const ALL = 'all'; -const PER_PAGE = 5; +const PER_PAGE = 10; const AUTOREFRESH_INTERVAL = 3 * 60 * 1000; var timeoutId; @@ -136,6 +136,7 @@ export default class SessionList extends React.PureComponent { totalPages={Math.ceil(total / PER_PAGE)} onPageChange={(page) => this.props.updateCurrentPage(page)} limit={PER_PAGE} + debounceRequest={1000} />
{/* props.updateCurrentPage(page)} + limit={PER_PAGE} />
diff --git a/frontend/app/components/ui/Pagination/Pagination.tsx b/frontend/app/components/ui/Pagination/Pagination.tsx index 851b35161..0e552ea69 100644 --- a/frontend/app/components/ui/Pagination/Pagination.tsx +++ b/frontend/app/components/ui/Pagination/Pagination.tsx @@ -8,16 +8,17 @@ interface Props { totalPages: number onPageChange: (page: number) => void limit?: number + debounceRequest?: number } export default function Pagination(props: Props) { - const { page, totalPages, onPageChange, limit = 5 } = props; + const { page, totalPages, onPageChange, limit = 5, debounceRequest = 0 } = props; const [currentPage, setCurrentPage] = React.useState(page); React.useMemo( () => setCurrentPage(page), [page], ); - const debounceChange = React.useCallback(debounce(onPageChange, 1000), []); + const debounceChange = React.useCallback(debounce(onPageChange, debounceRequest), []); const changePage = (page: number) => { if (page > 0 && page <= totalPages) { diff --git a/frontend/app/duck/search.js b/frontend/app/duck/search.js index dac47f6b7..f4e480b48 100644 --- a/frontend/app/duck/search.js +++ b/frontend/app/duck/search.js @@ -142,7 +142,7 @@ const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getStat } filter.filters = filter.filters.map(filterMap); - filter.limit = 5; + filter.limit = 10; filter.page = getState().getIn([ 'search', 'currentPage']); return isRoute(ERRORS_ROUTE, window.location.pathname) From bfffc8ee4cc52bb1ac9b47ef0ea0d3aa46b50285 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 10 Mar 2022 20:28:01 +0100 Subject: [PATCH 09/60] feat(db): autocomplete partial indexes changes feat(api): autocomplete startsWith if len<3 --- api/chalicelib/core/events.py | 385 +++++++++++------- api/chalicelib/core/sessions_metas.py | 125 ++---- .../utils/event_filter_definition.py | 7 +- api/routers/core.py | 2 +- .../db/init_dbs/postgresql/1.5.4/1.5.4.sql | 30 +- .../db/init_dbs/postgresql/1.5.4/1.5.4.sql | 30 +- 6 files changed, 305 insertions(+), 274 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 7abaa4fe9..a8c43e398 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -97,7 +97,55 @@ def __get_data_for_extend(data): return data["data"] -def __pg_errors_query(source=None): +def __pg_errors_query(source=None, value_length=None): + if value_length is None or value_length > 2: + return f"""((SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.message ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION + (SELECT DISTINCT ON(lg.message) + lg.message AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s 
+ AND lg.message ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + source, + '{event_type.ERROR.ui_type}' AS type + FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + AND lg.project_id = %(project_id)s + {"AND source = %(source)s" if source is not None else ""} + LIMIT 5));""" return f"""((SELECT DISTINCT ON(lg.message) lg.message AS value, source, @@ -120,30 +168,6 @@ def __pg_errors_query(source=None): AND lg.name ILIKE %(svalue)s AND lg.project_id = %(project_id)s {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION - (SELECT DISTINCT ON(lg.message) - lg.message AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.message ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - source, - '{event_type.ERROR.ui_type}' AS type - FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - AND lg.project_id = %(project_id)s - {"AND source = %(source)s" if source is not None else ""} LIMIT 5));""" @@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value), - "source": source})) + cur.mogrify(__pg_errors_query(source, + value_length=len(value) \ + if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value), + "source": source})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None): def __search_pg_errors_ios(project_id, value, key=None, source=None): now = TimeUTC.now() + if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER 
JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(value)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(value)s + LIMIT 5);""" + else: + query = f"""(SELECT DISTINCT ON(lg.reason) + lg.reason AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.reason ILIKE %(svalue)s + LIMIT 5) + UNION ALL + (SELECT DISTINCT ON(lg.name) + lg.name AS value, + '{event_type.ERROR_IOS.ui_type}' AS type + FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) + WHERE + s.project_id = %(project_id)s + AND lg.project_id = %(project_id)s + AND lg.name ILIKE %(svalue)s + LIMIT 5);""" with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason) - lg.reason AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.reason ILIKE %(value)s - LIMIT 5) - UNION ALL - (SELECT DISTINCT ON(lg.name) - lg.name AS value, - '{event_type.ERROR_IOS.ui_type}' AS type - FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id) - WHERE - s.project_id = %(project_id)s - AND lg.name ILIKE %(value)s - LIMIT 5);""", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) print(f"{TimeUTC.now() - now} : errors") return results @@ -198,42 +268,82 @@ def __search_pg_metadata(project_id, value, key=None, source=None): for k in meta_keys.keys(): colname = metadata.index_to_colname(meta_keys[k]) - sub_from.append( - f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)") + if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2: + sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5) + UNION + (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(value)s LIMIT 5)) + """) + else: + sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key + FROM public.sessions + WHERE project_id = %(project_id)s + AND {colname} ILIKE %(svalue)s LIMIT 5)""") with pg_client.PostgresClient() as cur: + print(cur.mogrify(f"""\ + SELECT key, value, 'METADATA' AS TYPE + FROM({" UNION ALL ".join(sub_from)}) AS 
all_metas + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) cur.execute(cur.mogrify(f"""\ SELECT key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)})) + LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) results = helper.list_to_camel_case(cur.fetchall()) return results -def __generic_query(typename): - return f"""\ - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(svalue)s - LIMIT 5) - UNION - (SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type='{typename}' - AND value ILIKE %(value)s - LIMIT 5)""" +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f"""(SELECT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 5) + UNION + (SELECT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(value)s + LIMIT 5);""" + return f"""SELECT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type='{typename}' + AND value ILIKE %(svalue)s + LIMIT 10;""" def __generic_autocomplete(event: Event): def f(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(__generic_query(event.ui_type), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + print( + cur.mogrify( + __generic_query(event.ui_type, + value_length=len(value) \ + if SUPPORTED_TYPES[event.ui_type].change_by_length \ + else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) + cur.execute( + cur.mogrify( + __generic_query(event.ui_type, + value_length=len(value) \ + if SUPPORTED_TYPES[event.ui_type].change_by_length \ + else None), + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)})) return helper.list_to_camel_case(cur.fetchall()) return f @@ -263,95 +373,47 @@ class event_type: SUPPORTED_TYPES = { event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK), query=__generic_query(typename=event_type.CLICK.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT), query=__generic_query(typename=event_type.INPUT.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION), query=__generic_query(typename=event_type.LOCATION.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM), query=__generic_query(typename=event_type.CUSTOM.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), 
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST), query=__generic_query(typename=event_type.REQUEST.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL), query=__generic_query(typename=event_type.GRAPHQL.ui_type), - value_limit=3, - starts_with="/", - starts_limit=4, - ignore_if_starts_with=[]), + change_by_length=True), event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION), query=__generic_query(typename=event_type.STATEACTION.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata, - query=None, - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + query=None, change_by_length=True), # IOS event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS), query=__generic_query(typename=event_type.CLICK_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS), query=__generic_query(typename=event_type.INPUT_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS), query=__generic_query(typename=event_type.VIEW_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[]), + change_by_length=True), event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS), query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=[""]), + change_by_length=True), event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS), query=__generic_query(typename=event_type.REQUEST_IOS.ui_type), - value_limit=3, - starts_with="/", - starts_limit=3, - ignore_if_starts_with=[""]), - event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors, - query=None, - value_limit=4, - starts_with="", - starts_limit=4, - ignore_if_starts_with=["/"]), + change_by_length=True), + event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios, + query=None, change_by_length=True), } @@ -369,36 +431,49 @@ def __get_merged_queries(queries, value, project_id): def __get_autocomplete_table(value, project_id): with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT project_id, type, value - FROM (SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(svalue)s - UNION - SELECT *, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(value)s) AS u - WHERE Row_ID <= 5) AS sfa - ORDER BY sfa.type;""", - 
{"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) + if len(value) > 2: + query = """SELECT DISTINCT ON(value,type) project_id, value, type + FROM (SELECT project_id, type, value + FROM (SELECT *, + ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND value ILIKE %(svalue)s + UNION + SELECT *, + ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND value ILIKE %(value)s) AS u + WHERE Row_ID <= 5) AS sfa + ORDER BY sfa.type;""" + else: + query = """SELECT DISTINCT ON(value,type) project_id, value, type + FROM (SELECT project_id, type, value + FROM (SELECT *, + ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND value ILIKE %(svalue)s) AS u + WHERE Row_ID <= 5) AS sfa + ORDER BY sfa.type;""" + query = cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)}) + cur.execute(query) results = helper.list_to_camel_case(cur.fetchall()) return results -def search_pg2(text, event_type, project_id, source, key): +def search(text, event_type, project_id, source, key): if not event_type: return {"data": __get_autocomplete_table(text, project_id)} if event_type in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source) - if event_type + "_IOS" in SUPPORTED_TYPES.keys(): - rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, - source=source) + # for IOS events autocomplete + # if event_type + "_IOS" in SUPPORTED_TYPES.keys(): + # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, + # source=source) elif event_type + "_IOS" in SUPPORTED_TYPES.keys(): rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source) diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index 1d342d03f..d605e5e1c 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -80,17 +80,16 @@ def get_top_key_values(project_id): return helper.dict_to_CAPITAL_keys(row) -def __generic_query(typename): - return f"""\ - SELECT value, type - FROM ((SELECT value, type - FROM public.autocomplete - WHERE - project_id = %(project_id)s - AND type ='{typename}' - AND value ILIKE %(svalue)s - ORDER BY value - LIMIT 5) +def __generic_query(typename, value_length=None): + if value_length is None or value_length > 2: + return f""" (SELECT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value ILIKE %(svalue)s + ORDER BY value + LIMIT 5) UNION (SELECT value, type FROM public.autocomplete @@ -99,13 +98,23 @@ def __generic_query(typename): AND type ='{typename}' AND value ILIKE %(value)s ORDER BY value - LIMIT 5)) AS met""" + LIMIT 5);""" + return f""" SELECT value, type + FROM public.autocomplete + WHERE + project_id = %(project_id)s + AND type ='{typename}' + AND value ILIKE %(svalue)s + ORDER BY value + LIMIT 10;""" def __generic_autocomplete(typename): def f(project_id, text): with pg_client.PostgresClient() as cur: - query = cur.mogrify(__generic_query(typename), + query = cur.mogrify(__generic_query(typename, + value_length=len(text) \ + 
if SUPPORTED_TYPES[typename].change_by_length else None), {"project_id": project_id, "value": helper.string_to_sql_like(text), "svalue": helper.string_to_sql_like("^" + text)}) @@ -120,124 +129,73 @@ SUPPORTED_TYPES = { schemas.FilterType.user_os: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os), query=__generic_query(typename=schemas.FilterType.user_os), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_browser: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_browser), query=__generic_query(typename=schemas.FilterType.user_browser), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_device), query=__generic_query(typename=schemas.FilterType.user_device), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country), query=__generic_query(typename=schemas.FilterType.user_country), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id), query=__generic_query(typename=schemas.FilterType.user_id), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id), query=__generic_query(typename=schemas.FilterType.user_anonymous_id), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.rev_id), query=__generic_query(typename=schemas.FilterType.rev_id), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.referrer: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.referrer), query=__generic_query(typename=schemas.FilterType.referrer), - value_limit=5, - starts_with="/", - starts_limit=5, - ignore_if_starts_with=[]), + change_by_length=True), schemas.FilterType.utm_campaign: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign), query=__generic_query(typename=schemas.FilterType.utm_campaign), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_medium: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_medium), query=__generic_query(typename=schemas.FilterType.utm_medium), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.utm_source: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.utm_source), query=__generic_query(typename=schemas.FilterType.utm_source), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), # IOS schemas.FilterType.user_os_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios), 
query=__generic_query(typename=schemas.FilterType.user_os_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_device_ios: SupportedFilter( get=__generic_autocomplete( typename=schemas.FilterType.user_device_ios), query=__generic_query(typename=schemas.FilterType.user_device_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_country_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios), query=__generic_query(typename=schemas.FilterType.user_country_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios), query=__generic_query(typename=schemas.FilterType.user_id_ios), - value_limit=2, - starts_with="", - starts_limit=2, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.user_anonymous_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios), query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios), - value_limit=3, - starts_with="", - starts_limit=3, - ignore_if_starts_with=["/"]), + change_by_length=True), schemas.FilterType.rev_id_ios: SupportedFilter( get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios), query=__generic_query(typename=schemas.FilterType.rev_id_ios), - value_limit=0, - starts_with="", - starts_limit=0, - ignore_if_starts_with=["/"]), + change_by_length=True), } @@ -247,6 +205,7 @@ def search(text, meta_type, project_id): if meta_type not in list(SUPPORTED_TYPES.keys()): return {"errors": ["unsupported type"]} rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) - if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): - rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) + # for IOS events autocomplete + # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): + # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) return {"data": rows} diff --git a/api/chalicelib/utils/event_filter_definition.py b/api/chalicelib/utils/event_filter_definition.py index 4c132cb13..b21d49b9c 100644 --- a/api/chalicelib/utils/event_filter_definition.py +++ b/api/chalicelib/utils/event_filter_definition.py @@ -6,10 +6,7 @@ class Event: class SupportedFilter: - def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with): + def __init__(self, get, query, change_by_length): self.get = get self.query = query - self.valueLimit = value_limit - self.startsWith = starts_with - self.startsLimit = starts_limit - self.ignoreIfStartsWith = ignore_if_starts_with + self.change_by_length = change_by_length diff --git a/api/routers/core.py b/api/routers/core.py index 73ae5fc20..8393f55f5 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -126,7 +126,7 @@ def events_search(projectId: int, q: str, else: return {"data": []} - result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key) + result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key) return result diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql index a2a8597ec..d043cedcb 100644 --- 
a/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -8,18 +8,18 @@ $$ LANGUAGE sql IMMUTABLE; COMMIT; -CREATE INDEX CONCURRENTLY autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; -CREATE INDEX CONCURRENTLY autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; -CREATE INDEX CONCURRENTLY autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; -CREATE INDEX CONCURRENTLY autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; -CREATE INDEX CONCURRENTLY autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; -CREATE INDEX CONCURRENTLY autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; -CREATE INDEX CONCURRENTLY autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; -CREATE INDEX CONCURRENTLY autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; -CREATE INDEX CONCURRENTLY autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; -CREATE INDEX CONCURRENTLY autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; -CREATE INDEX CONCURRENTLY autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; -CREATE INDEX CONCURRENTLY autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; -CREATE INDEX CONCURRENTLY autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; -CREATE INDEX CONCURRENTLY autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; -CREATE INDEX CONCURRENTLY autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON 
public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; diff --git a/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql index 7c1683677..e03c8dfc7 100644 --- a/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql +++ b/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -8,18 +8,18 @@ $$ LANGUAGE sql IMMUTABLE; COMMIT; -CREATE INDEX CONCURRENTLY autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; -CREATE INDEX CONCURRENTLY autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; -CREATE INDEX CONCURRENTLY autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; -CREATE INDEX CONCURRENTLY autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; -CREATE INDEX CONCURRENTLY autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; -CREATE INDEX CONCURRENTLY autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; -CREATE INDEX CONCURRENTLY autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; -CREATE INDEX CONCURRENTLY autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; -CREATE INDEX CONCURRENTLY autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; -CREATE INDEX CONCURRENTLY autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; -CREATE INDEX CONCURRENTLY autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; -CREATE INDEX CONCURRENTLY autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; -CREATE INDEX CONCURRENTLY autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; -CREATE INDEX CONCURRENTLY autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN 
(value gin_trgm_ops) WHERE type = 'USERID'; -CREATE INDEX CONCURRENTLY autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID'; +CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS'; From 2c50d9bda61332e09123a518f2b4e61a7b01955b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 10 Mar 2022 21:02:38 +0100 Subject: [PATCH 10/60] feat(api): autocomplete fixed DISTINCT values --- api/chalicelib/core/events.py | 25 ++++++++++--------------- api/chalicelib/core/sessions_metas.py | 6 +++--- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index a8c43e398..c84dd1178 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -285,11 +285,6 @@ def __search_pg_metadata(project_id, value, key=None, source=None): WHERE project_id = %(project_id)s AND {colname} ILIKE %(svalue)s LIMIT 5)""") with pg_client.PostgresClient() as cur: - print(cur.mogrify(f"""\ - SELECT key, value, 'METADATA' AS TYPE - FROM({" UNION ALL ".join(sub_from)}) AS all_metas - LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), - 
"svalue": helper.string_to_sql_like("^" + value)})) cur.execute(cur.mogrify(f"""\ SELECT key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas @@ -301,7 +296,7 @@ def __search_pg_metadata(project_id, value, key=None, source=None): def __generic_query(typename, value_length=None): if value_length is None or value_length > 2: - return f"""(SELECT value, type + return f"""(SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s @@ -309,14 +304,14 @@ def __generic_query(typename, value_length=None): AND value ILIKE %(svalue)s LIMIT 5) UNION - (SELECT value, type + (SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s AND type='{typename}' AND value ILIKE %(value)s LIMIT 5);""" - return f"""SELECT value, type + return f"""SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s @@ -432,15 +427,15 @@ def __get_merged_queries(queries, value, project_id): def __get_autocomplete_table(value, project_id): with pg_client.PostgresClient() as cur: if len(value) > 2: - query = """SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT project_id, type, value - FROM (SELECT *, + query = """SELECT DISTINCT value, type + FROM (SELECT type, value + FROM (SELECT type, value, ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID FROM public.autocomplete WHERE project_id = %(project_id)s AND value ILIKE %(svalue)s UNION - SELECT *, + SELECT type, value, ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID FROM public.autocomplete WHERE project_id = %(project_id)s @@ -448,9 +443,9 @@ def __get_autocomplete_table(value, project_id): WHERE Row_ID <= 5) AS sfa ORDER BY sfa.type;""" else: - query = """SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT project_id, type, value - FROM (SELECT *, + query = """SELECT DISTINCT value, type + FROM (SELECT type, value + FROM (SELECT type, value, ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID FROM public.autocomplete WHERE project_id = %(project_id)s diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index d605e5e1c..07aad2ee4 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -82,7 +82,7 @@ def get_top_key_values(project_id): def __generic_query(typename, value_length=None): if value_length is None or value_length > 2: - return f""" (SELECT value, type + return f""" (SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s @@ -91,7 +91,7 @@ def __generic_query(typename, value_length=None): ORDER BY value LIMIT 5) UNION - (SELECT value, type + (SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s @@ -99,7 +99,7 @@ def __generic_query(typename, value_length=None): AND value ILIKE %(value)s ORDER BY value LIMIT 5);""" - return f""" SELECT value, type + return f""" SELECT DISTINCT value, type FROM public.autocomplete WHERE project_id = %(project_id)s From b4de14423dad232f734b4f8ad91b562ab759d366 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 10 Mar 2022 21:50:48 +0100 Subject: [PATCH 11/60] feat(api): search bookmarked sessions --- api/chalicelib/core/alerts_processor.py | 6 ++--- api/chalicelib/core/errors.py | 2 +- api/chalicelib/core/sessions.py | 29 ++++++++++++++----------- api/routers/core.py | 2 +- api/schemas.py | 1 + 5 files changed, 21 insertions(+), 19 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py 
b/api/chalicelib/core/alerts_processor.py index 21249773c..4fd7f27ec 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -103,10 +103,8 @@ def Build(a): a["filter"]["startDate"] = -1 a["filter"]["endDate"] = TimeUTC.now() full_args, query_part, sort = sessions.search_query_parts( - data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), - error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=a["projectId"], - user_id=None) + data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, + issue=None, project_id=a["projectId"], user_id=None) subQ = f"""SELECT COUNT(session_id) AS value {query_part}""" else: diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 48e6b7bd9..c4825e163 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -439,7 +439,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F data["endDate"] = TimeUTC.now(1) if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only: statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status, favorite_only=favorite_only) + error_status=status) if len(statuses) == 0: return {"data": { 'total': 0, diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index b67df2a1e..6c9ec24e6 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -168,10 +168,9 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed -def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False, +def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status="ALL", count_only=False, issue=None): - full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, - user_id) + full_args, query_part, sort = search_query_parts(data, error_status, errors_only, issue, project_id, user_id) if data.limit is not None and data.page is not None: full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit @@ -229,9 +228,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""", full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") + print("--------------------") + print(main_query) + print("--------------------") cur.execute(main_query) if count_only: @@ -282,7 +281,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, operator=schemas.SearchEventOperator._is)) full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, + issue=None, project_id=project_id, user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] @@ -366,7 +365,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d return sessions -def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None): +def 
search_query_parts(data, error_status, errors_only, issue, project_id, user_id, extra_event=None): ss_constraints = [] full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, "projectId": project_id, "userId": user_id} @@ -376,10 +375,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr ] extra_from = "" fav_only_join = "" - if favorite_only and not errors_only: + if data.bookmarked and not errors_only: fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id" - extra_constraints.append("fs.user_id = %(userId)s") - full_args["userId"] = user_id + # extra_constraints.append("fs.user_id = %(userId)s") events_query_part = "" if len(data.filters) > 0: meta_keys = None @@ -971,11 +969,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if error_status != "ALL": extra_constraints.append("ser.status = %(error_status)s") full_args["status"] = error_status.lower() - if favorite_only: + if data.bookmarked: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" extra_constraints.append("ufe.user_id = %(user_id)s") # extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints] - if not favorite_only and not errors_only and user_id is not None: + if data.bookmarked and not errors_only and user_id is not None: + extra_from += """INNER JOIN (SELECT user_id, session_id + FROM public.user_favorite_sessions + WHERE user_id = %(userId)s) AS favorite_sessions + USING (session_id)""" + elif not data.bookmarked and not errors_only and user_id is not None: extra_from += """LEFT JOIN (SELECT user_id, session_id FROM public.user_favorite_sessions WHERE user_id = %(userId)s) AS favorite_sessions diff --git a/api/routers/core.py b/api/routers/core.py index 73ae5fc20..86ba57daf 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -133,7 +133,7 @@ def events_search(projectId: int, q: str, @app.post('/{projectId}/sessions/search2', tags=["sessions"]) def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = sessions.search2_pg(data, projectId, user_id=context.user_id) + data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id) return {'data': data} diff --git a/api/schemas.py b/api/schemas.py index cf4ae6cd3..c89d9ad56 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -614,6 +614,7 @@ class SessionsSearchPayloadSchema(BaseModel): group_by_user: bool = Field(default=False) limit: int = Field(default=200, gt=0, le=200) page: int = Field(default=1, gt=0) + bookmarked: bool = Field(default=False) class Config: alias_generator = attribute_to_camel_case From 0b9eebbf2cd0cf4d67a075e23ac9ddc09762643b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 11 Mar 2022 17:37:51 +0100 Subject: [PATCH 12/60] feat(api): global autocomplete change --- api/chalicelib/core/events.py | 77 +++++++++++++---------------------- api/routers/core.py | 11 ----- 2 files changed, 29 insertions(+), 59 deletions(-) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index c84dd1178..f801f609d 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -323,14 +323,6 @@ def __generic_query(typename, value_length=None): def __generic_autocomplete(event: Event): def f(project_id, value, key=None, source=None): with pg_client.PostgresClient() as cur: - print( - cur.mogrify( - 
__generic_query(event.ui_type, - value_length=len(value) \ - if SUPPORTED_TYPES[event.ui_type].change_by_length \ - else None), - {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)})) cur.execute( cur.mogrify( __generic_query(event.ui_type, @@ -412,48 +404,37 @@ SUPPORTED_TYPES = { } -def __get_merged_queries(queries, value, project_id): - if len(queries) == 0: - return [] - now = TimeUTC.now() - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")", - {"project_id": project_id, "value": helper.string_to_sql_like(value)})) - results = helper.list_to_camel_case(cur.fetchall()) - print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}") - return results - - def __get_autocomplete_table(value, project_id): - with pg_client.PostgresClient() as cur: + autocomplete_events = [schemas.FilterType.rev_id, + schemas.EventType.click, + schemas.FilterType.user_device, + schemas.FilterType.user_id, + schemas.FilterType.user_browser, + schemas.FilterType.user_os, + schemas.EventType.custom, + schemas.FilterType.user_country, + schemas.EventType.location, + schemas.EventType.input] + autocomplete_events.sort() + sub_queries = [] + for e in autocomplete_events: + sub_queries.append(f"""(SELECT DISTINCT type, value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(svalue)s + LIMIT 5)""") if len(value) > 2: - query = """SELECT DISTINCT value, type - FROM (SELECT type, value - FROM (SELECT type, value, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(svalue)s - UNION - SELECT type, value, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(value)s) AS u - WHERE Row_ID <= 5) AS sfa - ORDER BY sfa.type;""" - else: - query = """SELECT DISTINCT value, type - FROM (SELECT type, value - FROM (SELECT type, value, - ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID - FROM public.autocomplete - WHERE project_id = %(project_id)s - AND value ILIKE %(svalue)s) AS u - WHERE Row_ID <= 5) AS sfa - ORDER BY sfa.type;""" - query = cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value), - "svalue": helper.string_to_sql_like("^" + value)}) + sub_queries.append(f"""(SELECT DISTINCT type, value + FROM public.autocomplete + WHERE project_id = %(project_id)s + AND type= '{e}' + AND value ILIKE %(value)s + LIMIT 5)""") + with pg_client.PostgresClient() as cur: + query = cur.mogrify("UNION".join(sub_queries) + ";", + {"project_id": project_id, "value": helper.string_to_sql_like(value), + "svalue": helper.string_to_sql_like("^" + value)}) cur.execute(query) results = helper.list_to_camel_case(cur.fetchall()) return results diff --git a/api/routers/core.py b/api/routers/core.py index 8393f55f5..1953e33b9 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -147,17 +147,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext = return {'data': sessions_metas.get_top_key_values(projectId)} -@app.get('/{projectId}/sessions/filters/search', tags=["sessions"]) -def get_session_filters_meta(projectId: int, q: str, type: str, - context: schemas.CurrentContext = Depends(OR_context)): - meta_type = type - if len(meta_type) == 0: - return {"data": []} - if len(q) == 0: - return {"data": []} 
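# [Editor's note, annotation rather than patch content: the per-type sub-queries
#  built in __get_autocomplete_table above keep the LIMIT 5 independent for each
#  autocomplete type, and their type = '<TYPE>' predicates line up with the
#  partial GIN trigram indexes added by the 1.5.4 migration earlier in this
#  series (WHERE type = 'CLICK', 'INPUT', ...), so the planner can serve each
#  branch from its own index. A sketch of the generated SQL shape for two types:
#
#      (SELECT DISTINCT type, value FROM public.autocomplete
#       WHERE project_id = %(project_id)s AND type= 'CLICK' AND value ILIKE %(svalue)s LIMIT 5)
#      UNION
#      (SELECT DISTINCT type, value FROM public.autocomplete
#       WHERE project_id = %(project_id)s AND type= 'INPUT' AND value ILIKE %(svalue)s LIMIT 5);]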
-    return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
-
-
 @app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
 @app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
 def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,

From 3177887973b27d5400d4a727f546fdbcbaea70f4 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 11 Mar 2022 18:47:37 +0100
Subject: [PATCH 13/60] feat(api): global autocomplete changed union type
 feat(api): global autocomplete removed DISTINCT because values should be
 unique for each project

---
 api/chalicelib/core/events.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py
index f801f609d..db515d995 100644
--- a/api/chalicelib/core/events.py
+++ b/api/chalicelib/core/events.py
@@ -418,21 +418,21 @@ def __get_autocomplete_table(value, project_id):
     autocomplete_events.sort()
     sub_queries = []
     for e in autocomplete_events:
-        sub_queries.append(f"""(SELECT DISTINCT type, value
+        sub_queries.append(f"""(SELECT type, value
                                 FROM public.autocomplete
                                 WHERE project_id = %(project_id)s
                                   AND type= '{e}'
                                   AND value ILIKE %(svalue)s
                                 LIMIT 5)""")
         if len(value) > 2:
-            sub_queries.append(f"""(SELECT DISTINCT type, value
+            sub_queries.append(f"""(SELECT type, value
                                     FROM public.autocomplete
                                     WHERE project_id = %(project_id)s
                                       AND type= '{e}'
                                       AND value ILIKE %(value)s
                                     LIMIT 5)""")
     with pg_client.PostgresClient() as cur:
-        query = cur.mogrify("UNION".join(sub_queries) + ";",
+        query = cur.mogrify("UNION ALL".join(sub_queries) + ";",
                             {"project_id": project_id, "value": helper.string_to_sql_like(value),
                              "svalue": helper.string_to_sql_like("^" + value)})
         cur.execute(query)

From f10f073e510901d477f4de4cc9bac0be8e029094 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Fri, 11 Mar 2022 20:16:53 +0100
Subject: [PATCH 14/60] feat(api): removed unused session search

---
 api/chalicelib/core/sessions.py | 42 ---------------------------------
 api/routers/core.py             |  7 ------
 2 files changed, 49 deletions(-)

diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index b67df2a1e..b404f0c2b 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -1102,48 +1102,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date):
     return helper.list_to_camel_case(rows)
 
 
-def get_favorite_sessions(project_id, user_id, include_viewed=False):
-    with pg_client.PostgresClient() as cur:
-        query_part = cur.mogrify(f"""\
-            FROM public.sessions AS s
-                     LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
-            WHERE fs.user_id = %(userId)s""",
-                                 {"projectId": project_id, "userId": user_id}
-                                 )
-
-        extra_query = b""
-        if include_viewed:
-            extra_query = cur.mogrify(""",\
-                       COALESCE((SELECT TRUE
-                                 FROM public.user_viewed_sessions AS fs
-                                 WHERE s.session_id = fs.session_id
-                                   AND fs.user_id = %(userId)s), FALSE) AS viewed""",
-                                      {"projectId": project_id, "userId": user_id})
-
-        cur.execute(f"""\
-                    SELECT s.project_id,
-                           s.session_id::text AS session_id,
-                           s.user_uuid,
-                           s.user_id,
-                           s.user_os,
-                           s.user_browser,
-                           s.user_device,
-                           s.user_country,
-                           s.start_ts,
-                           s.duration,
-                           s.events_count,
-                           s.pages_count,
-                           s.errors_count,
-                           TRUE AS favorite
-                           {extra_query.decode('UTF-8')}
-                        {query_part.decode('UTF-8')}
-                    ORDER BY s.session_id
-                    LIMIT 50;""")
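# [Editor's note, annotation rather than patch content: the get_favorite_sessions
#  helper being deleted here is superseded by the generic session search from
#  PATCH 11, which reaches the same rows through the new `bookmarked` flag.
#  A minimal sketch of the equivalent call, assuming the remaining payload
#  fields keep their defaults:
#
#      payload = schemas.SessionsSearchPayloadSchema(bookmarked=True)
#      favorites = sessions.search2_pg(data=payload, project_id=project_id,
#                                      user_id=user_id)]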
- - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - def get_user_sessions(project_id, user_id, start_date, end_date): with pg_client.PostgresClient() as cur: constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] diff --git a/api/routers/core.py b/api/routers/core.py index 1953e33b9..d5198a2e3 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -21,13 +21,6 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() -@app.get('/{projectId}/sessions2/favorite', tags=["sessions"]) -def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return { - 'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True) - } - - @app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): From 527bce9936c346c061434eb62a22365d5f166299 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 11 Mar 2022 22:10:59 +0100 Subject: [PATCH 15/60] feat(api): changed funnels to support ghost stages --- api/chalicelib/core/funnels.py | 98 +++++++++++++++++++---------- api/chalicelib/core/significance.py | 14 +++-- api/routers/core.py | 9 +-- api/schemas.py | 2 + 4 files changed, 81 insertions(+), 42 deletions(-) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index cdd6cec20..70a34c4f6 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -1,4 +1,5 @@ import json +from typing import List import chalicelib.utils.helper import schemas @@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"] ALLOW_UPDATE_FOR = ["name", "filter"] -# def filter_stages(stages): -# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type, -# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type, -# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type, -# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ] -# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None] +def filter_stages(stages: List[schemas._SessionSearchEventSchema]): + ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input, + schemas.EventType.location, schemas.EventType.custom, + schemas.EventType.click_ios, schemas.EventType.input_ios, + schemas.EventType.view_ios, schemas.EventType.custom_ios, ] + return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None] + + +def __parse_events(f_events: List[dict]): + return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events] + + +def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]): + return [e.dict() for e in f_events] + + +def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): + if f_events is None: + return + events = [] + for e in f_events: + if e.operator is None: + e.operator = schemas.SearchEventOperator._is + + if not isinstance(e.value, list): + e.value = [e.value] + is_any = sessions._isAny_opreator(e.operator) + if not is_any and isinstance(e.value, list) and len(e.value) == 0: + continue + events.append(e) + return events + def __transform_old_funnels(events): for e in events: @@ -28,7 +55,7 @@ def __transform_old_funnels(events): def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public): 
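    # [Editor's note, annotation rather than patch content: create() now
    #  sanitizes stages with filter_stages(), which keeps only funnel-capable
    #  events (click/input/location/custom and their iOS variants) that carry a
    #  value. The on-the-fly endpoints further down also run __fix_stages(),
    #  which defaults the operator to "is" and coerces value to a list; a stage
    #  with an empty value list survives only when its operator is an "any"
    #  operator (checked via sessions._isAny_opreator), otherwise it is dropped.
    #  Sketch, assuming the schema accepts keyword construction with just these
    #  fields:
    #
    #      stage = schemas._SessionSearchEventSchema(type=schemas.EventType.click, value=[])
    #      __fix_stages(filter_stages([stage]))  # -> [] : empty "is" stage is dropped]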
helper.delete_keys_from_dict(filter, REMOVE_KEYS) - # filter.events = filter_stages(stages=filter.events) + filter.events = filter_stages(stages=filter.events) with pg_client.PostgresClient() as cur: query = cur.mogrify("""\ INSERT INTO public.funnels (project_id, user_id, name, filter,is_public) @@ -76,6 +103,8 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non query ) r = cur.fetchone() + if r is None: + return {"errors": ["funnel not found"]} r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r = helper.dict_to_camel_case(r) r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) @@ -102,9 +131,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date for row in rows: row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) if details: - # row["filter"]["events"] = filter_stages(row["filter"]["events"]) + row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"])) if row.get("filter") is not None and row["filter"].get("events") is not None: - row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"]) + row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"])) get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date, end_date=end_date) @@ -168,7 +197,7 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema): - # data.events = filter_stages(data.events) + data.events = filter_stages(data.events) if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: @@ -192,17 +221,18 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat "totalDropDueToIssues": total_drop_due_to_issues}} -def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data): - # data["events"] = filter_stages(data.get("events", [])) - if len(data["events"]) == 0: +def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema): + data.events = filter_stages(__parse_events(data.events)) + if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None), - start_date=data.get('startDate', None), - end_date=data.get('endDate', None)) - data = f["filter"] - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id) + get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, + start_date=data.startDate, + end_date=data.endDate) + data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"]) + data.events = __fix_stages(data.events) + insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) if len(insights) > 0: insights[-1]["dropDueToIssues"] = total_drop_due_to_issues return {"data": {"stages": helper.list_to_camel_case(insights), @@ -220,25 +250,25 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None @dev.timed -def get_issues_on_the_fly(funnel_id, user_id, project_id, data): - first_stage = data.get("firstStage") - last_stage = data.get("lastStage") - # data["events"] = filter_stages(data.get("events", [])) - if 
len(data["events"]) == 0: +def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): + data.events = filter_stages(data.events) + if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None), - start_date=data.get('startDate', None), - end_date=data.get('endDate', None)) - data = f["filter"] + get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, + start_date=data.startDate, + end_date=data.endDate) + data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) + if len(data.events) < 2: + return {"issues": []} return { "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage, - last_stage=last_stage))} + significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=data.events[0].dict(), + last_stage=data.events[-1].dict()))} -def get(funnel_id, project_id, user_id, flatten=True): +def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( @@ -260,7 +290,11 @@ def get(funnel_id, project_id, user_id, flatten=True): if f.get("filter") is not None and f["filter"].get("events") is not None: f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"]) f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"]) - # f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) + f["filter"]["events"] = __parse_events(f["filter"]["events"]) + f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) + if fix_stages: + f["filter"]["events"] = __fix_stages(f["filter"]["events"]) + f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]] if flatten: f["filter"] = helper.old_search_payload_to_flat(f["filter"]) return f @@ -279,7 +313,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn end_date=data.endDate) data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \ + issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \ .get("issues", {}) issues = issues.get("significant", []) + issues.get("insignificant", []) issue = None diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index ab242d7e8..035890e2f 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: first_stage_extra_constraints.append( sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) + i = -1 + for s in stages: - for i, s in enumerate(stages): - if i == 0: - extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] - else: - extra_from = [] if s.get("operator") is None: s["operator"] = "is" @@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: is_any = sessions._isAny_opreator(s["operator"]) if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0: continue + i += 1 + if i == 0: + extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING 
(session_id)"] + else: + extra_from = [] op = sessions.__get_sql_operator(s["operator"]) event_type = s["type"].upper() if event_type == events.event_type.CLICK.ui_type: @@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: ISS.issue_id as issue_id FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id) WHERE ISE.timestamp >= stages_t.stage1_timestamp - AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp + AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp AND ISS.project_id=%(project_id)s {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t ) AS issues_t diff --git a/api/routers/core.py b/api/routers/core.py index 73ae5fc20..4433ec331 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -716,7 +716,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data.dict()) + data=data) @app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) @@ -731,7 +731,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data.dict())} + data=data)} @app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) @@ -755,10 +755,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas. 
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): issue = issues.get(project_id=projectId, issue_id=issueId) + if issue is None: + return {"errors": ["issue not found"]} return { "data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue, - start_date=startDate, - end_date=endDate), + start_date=startDate, end_date=endDate), "issue": issue}} diff --git a/api/schemas.py b/api/schemas.py index cf4ae6cd3..0a5872d46 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -662,6 +662,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema): order: Optional[str] = Field(None) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) group_by_user: Optional[bool] = Field(default=False, const=True) + rangeValue: Optional[str] = Field(None) @root_validator(pre=True) def enforce_default_values(cls, values): @@ -694,6 +695,7 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): order: Optional[str] = Field(None) events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) group_by_user: Optional[bool] = Field(default=False, const=True) + rangeValue: Optional[str] = Field(None) class MetricPayloadSchema(BaseModel): From c9717d1ff81348a96aa8730add91669c0906466b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 11 Mar 2022 22:28:09 +0100 Subject: [PATCH 16/60] feat(api): changed funnels first_stage last_stage --- api/chalicelib/core/funnels.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 70a34c4f6..b522ed0a7 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -264,8 +264,8 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe return {"issues": []} return { "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=data.events[0].dict(), - last_stage=data.events[-1].dict()))} + significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, + last_stage=len(data.events)))} def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): From 007867f291ec9325e183ae85d9c90681ced7507c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 11 Mar 2022 22:37:14 +0100 Subject: [PATCH 17/60] feat(api): changed funnels to support ghost stages on the fly --- api/chalicelib/core/funnels.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index b522ed0a7..adcb1167e 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -198,6 +198,7 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema): data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) if len(data.events) == 0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: @@ -252,6 +253,7 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None @dev.timed def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): data.events = filter_stages(data.events) + data.events = __fix_stages(data.events) if len(data.events) == 
0: f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id) if f is None: From 1adf08b7a9e8c17e1ace4f946cda3b92cf3f72db Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 14 Mar 2022 11:59:40 +0100 Subject: [PATCH 18/60] fix(ui) - funnels date change --- .../Funnels/FunnelHeader/FunnelHeader.js | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js index 7f59f2d28..6130351e2 100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js @@ -1,7 +1,7 @@ import React, { useEffect, useState } from 'react'; import { Icon, BackLink, IconButton, Dropdown, Popup, TextEllipsis, Button } from 'UI'; import { remove as deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered } from 'Duck/funnels'; -import { editFilter, addFilter } from 'Duck/funnels'; +import { editFilter, refresh, addFilter } from 'Duck/funnels'; import DateRange from 'Shared/DateRange'; import { connect } from 'react-redux'; import { confirm } from 'UI/Confirmation'; @@ -19,16 +19,11 @@ const Info = ({ label = '', value = '', className = 'mx-4' }) => { const FunnelHeader = (props) => { const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props; - const [showSaveModal, setShowSaveModal] = useState(false) const writeOption = (e, { name, value }) => { - props.fetch(value).then(() => { - props.fetchInsights(value, {}) - props.fetchIssuesFiltered(value, {}) - props.fetchSessionsFiltered(value, {}) - props.redirect(value) - }) + props.redirect(value) + props.fetch(value).then(() => props.refresh(value)) } const deleteFunnel = async (e, funnel) => { @@ -45,11 +40,12 @@ const FunnelHeader = (props) => { } const onDateChange = (e) => { - props.editFilter(e, funnel.funnelId); + props.editFilter(e, funnelId); } const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS(); const selectedFunnel = funnels.filter(i => i.funnelId === parseInt(funnelId)).first() || {}; + const eventsCount = funnel.filter.filters.filter(i => i.isEvent).size; return (
@@ -76,7 +72,7 @@ const FunnelHeader = (props) => { selectOnBlur={false} icon={ } /> - + - @@ -114,4 +110,4 @@ const FunnelHeader = (props) => { export default connect(state => ({ funnel: state.getIn([ 'funnels', 'instance' ]), -}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered })(FunnelHeader) +}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader) From 341954c9d9851da682433579d459a2b484667d50 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 14 Mar 2022 13:28:30 +0100 Subject: [PATCH 19/60] fix(actions): get worker images Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 4588ccb09..a61d75160 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -47,7 +47,7 @@ jobs: # # Getting the images to build # - git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt + git diff --name-only HEAD HEAD~1 | grep backend/services | cut -d '/' -f3 | uniq > backend/images_to_build.txt [[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry From 87ef2b3c1c0791ad9316338943b7ade4388f494c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 14 Mar 2022 14:37:02 +0100 Subject: [PATCH 20/60] feat(api): changed get bookmarked errors --- api/chalicelib/core/errors.py | 57 +++++++++++++++++++-------------- api/chalicelib/core/sessions.py | 21 ++++++------ api/routers/core.py | 3 +- api/schemas.py | 13 +++----- 4 files changed, 49 insertions(+), 45 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 48e6b7bd9..2545480ac 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -1,5 +1,6 @@ import json +import schemas from chalicelib.core import sourcemaps, sessions from chalicelib.utils import pg_client, helper, dev from chalicelib.utils.TimeUTC import TimeUTC @@ -405,9 +406,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if chart: ch_sub_query += [f"timestamp >= generated_timestamp", f"timestamp < generated_timestamp + %({step_size_name})s"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -421,23 +422,28 @@ def __get_sort_key(key): @dev.timed -def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): status = status.upper() if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: return {"errors": ["invalid error status"]} - pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id") + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", "pe.project_id=%(project_id)s"] - 
pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True) + pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True) pg_sub_query_chart.append("source ='js_exception'") pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-30) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now(1) - if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only: + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, error_status=status, favorite_only=favorite_only) if len(statuses) == 0: @@ -447,28 +453,30 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F }} error_ids = [e["error_id"] for e in statuses] with pg_client.PostgresClient() as cur: - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-7) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now() - density = data.get("density", 7) - step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1) + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1) sort = __get_sort_key('datetime') - if data.get("sort") is not None: - sort = __get_sort_key(data["sort"]) + if data.sort is not None: + sort = __get_sort_key(data.sort) order = "DESC" - if data.get("order") is not None: - order = data["order"] - + if data.order is not None: + order = data.order + extra_join = "" params = { - "startDate": data['startDate'], - "endDate": data['endDate'], + "startDate": data.startDate, + "endDate": data.endDate, "project_id": project_id, "userId": user_id, "step_size": step_size} if error_ids is not None: params["error_ids"] = tuple(error_ids) pg_sub_query.append("error_id IN %(error_ids)s") + if favorite_only: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" main_pg_query = f"""\ SELECT error_id, name, @@ -488,6 +496,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F FROM events.errors INNER JOIN public.errors AS pe USING (error_id) INNER JOIN public.sessions USING (session_id) + {extra_join} WHERE {" AND ".join(pg_sub_query)} GROUP BY error_id, name, message ORDER BY {sort} {order}) AS details @@ -581,7 +590,7 @@ def __save_stacktrace(error_id, data): def get_trace(project_id, error_id): - error = get(error_id=error_id) + error = get(error_id=error_id, family=False) if error is None: return {"errors": ["error not found"]} if error.get("source", "") != "js_exception": diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index b67df2a1e..a9b5d5908 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -232,11 +232,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f # print("--------------------") # print(main_query) # print("--------------------") - cur.execute(main_query) - if 
count_only: - return helper.dict_to_camel_case(cur.fetchone()) + cur.execute(main_query) sessions = cur.fetchone() + if count_only: + return helper.dict_to_camel_case(sessions) + total = sessions["count"] sessions = sessions["sessions"] # sessions = [] @@ -973,7 +974,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr full_args["status"] = error_status.lower() if favorite_only: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - extra_constraints.append("ufe.user_id = %(user_id)s") + extra_constraints.append("ufe.user_id = %(userId)s") # extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints] if not favorite_only and not errors_only and user_id is not None: extra_from += """LEFT JOIN (SELECT user_id, session_id @@ -1196,11 +1197,11 @@ def get_session_user(project_id, user_id): "public".sessions WHERE project_id = %(project_id)s - AND user_id = %(user_id)s + AND user_id = %(userId)s AND duration is not null GROUP BY user_id; """, - {"project_id": project_id, "user_id": user_id} + {"project_id": project_id, "userId": user_id} ) cur.execute(query=query) data = cur.fetchone() @@ -1213,8 +1214,8 @@ def get_session_ids_by_user_ids(project_id, user_ids): """\ SELECT session_id FROM public.sessions WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} ) ids = cur.execute(query=query) return ids @@ -1240,8 +1241,8 @@ def delete_sessions_by_user_ids(project_id, user_ids): """\ DELETE FROM public.sessions WHERE - project_id = %(project_id)s AND user_id IN %(user_id)s;""", - {"project_id": project_id, "user_id": tuple(user_ids)} + project_id = %(project_id)s AND user_id IN %(userId)s;""", + {"project_id": project_id, "userId": tuple(user_ids)} ) cur.execute(query=query) diff --git a/api/routers/core.py b/api/routers/core.py index 4433ec331..220b87950 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -907,8 +907,7 @@ def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool context: schemas.CurrentContext = Depends(OR_context)): if isinstance(favorite, str): favorite = True if len(favorite) == 0 else False - return errors.search(data.dict(), projectId, user_id=context.user_id, status=status, - favorite_only=favorite) + return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite) @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/schemas.py b/api/schemas.py index 0a5872d46..cedc0ed07 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -83,15 +83,6 @@ class EditSlackSchema(BaseModel): url: HttpUrl = Field(...) -class SearchErrorsSchema(BaseModel): - platform: Optional[str] = Field(None) - startDate: Optional[int] = Field(TimeUTC.now(-7)) - endDate: Optional[int] = Field(TimeUTC.now()) - density: Optional[int] = Field(7) - sort: Optional[str] = Field(None) - order: Optional[str] = Field(None) - - class CreateNotificationSchema(BaseModel): token: str = Field(...) notifications: List = Field(...) 
@@ -698,6 +689,10 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): rangeValue: Optional[str] = Field(None) +class SearchErrorsSchema(SessionsSearchPayloadSchema): + density: Optional[int] = Field(7) + + class MetricPayloadSchema(BaseModel): startTimestamp: int = Field(TimeUTC.now(delta_days=-1)) endTimestamp: int = Field(TimeUTC.now()) From 3b80ea6c2b41ff15720e155f4ab74985f471a747 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 14 Mar 2022 17:05:20 +0100 Subject: [PATCH 21/60] feat(api): EE fixed get bookmarked error --- ee/api/chalicelib/core/errors.py | 124 ++++++++++++++++++------------- 1 file changed, 72 insertions(+), 52 deletions(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 04efdbb32..dc4ea17b2 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -1,5 +1,6 @@ import json +import schemas from chalicelib.core import dashboard from chalicelib.core import sourcemaps, sessions from chalicelib.utils import ch_client @@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data): COALESCE((SELECT TRUE FROM public.user_favorite_errors AS fe WHERE pe.error_id = fe.error_id - AND fe.user_id = %(user_id)s), FALSE) AS favorite, + AND fe.user_id = %(userId)s), FALSE) AS favorite, True AS viewed FROM public.errors AS pe INNER JOIN events.errors AS ee USING (error_id) @@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data): AND error_id = %(error_id)s ORDER BY start_ts DESC LIMIT 1;""", - {"project_id": project_id, "error_id": error_id, "user_id": user_id}) + {"project_id": project_id, "error_id": error_id, "userId": user_id}) cur.execute(query=query) status = cur.fetchone() @@ -443,54 +444,74 @@ def __get_sort_key(key): }.get(key, 'max_datetime') -def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} status = status.upper() if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: return {"errors": ["invalid error status"]} - ch_sub_query = __get_basic_constraints(data.get('platform')) + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + ch_sub_query = __get_basic_constraints(platform) ch_sub_query.append("source ='js_exception'") statuses = [] error_ids = None - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-30) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now(1) - if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only: + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, error_status=status, favorite_only=favorite_only) - error_ids = [e["error_id"] for e in statuses] if len(statuses) == 0: - return {"data": { - 'total': 0, - 'errors': [] - }} - with ch_client.ClickHouseClient() as ch: - if data.get("startDate") is None: - data["startDate"] = TimeUTC.now(-7) - if data.get("endDate") is None: - data["endDate"] = TimeUTC.now() - density = 
data.get("density", 7) - step_size = __get_step_size(data["startDate"], data["endDate"], density) + return empty_response + error_ids = [e["error_id"] for e in statuses] + with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = __get_step_size(data.startDate, data.endDate, data.density) sort = __get_sort_key('datetime') - if data.get("sort") is not None: - sort = __get_sort_key(data["sort"]) + if data.sort is not None: + sort = __get_sort_key(data.sort) order = "DESC" - if data.get("order") is not None: - order = data["order"] - + if data.order is not None: + order = data.order + extra_join = "" params = { - "startDate": data['startDate'], - "endDate": data['endDate'], + "startDate": data.startDate, + "endDate": data.endDate, "project_id": project_id, "userId": user_id, "step_size": step_size} + if favorite_only: + cur.execute(cur.mogrify(f"""SELECT error_id + FROM public.user_favorite_errors + WHERE user_id = %(userId)s + {"" if error_ids is None else "AND error_id IN %(error_ids)s"}""", + {"userId": user_id, "error_ids": tuple(error_ids or [])})) + error_ids = cur.fetchall() + if len(error_ids) == 0: + return empty_response + error_ids = [e["error_id"] for e in error_ids] + if error_ids is not None: params["error_ids"] = tuple(error_ids) ch_sub_query.append("error_id IN %(error_ids)s") + main_ch_query = f"""\ SELECT COUNT(DISTINCT error_id) AS count FROM errors WHERE {" AND ".join(ch_sub_query)};""" + # print("------------") + # print(ch.client().substitute_params(main_ch_query, params)) + # print("------------") total = ch.execute(query=main_ch_query, params=params)[0]["count"] if flows: return {"data": {"count": total}} @@ -510,7 +531,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F WHERE {" AND ".join(ch_sub_query)} GROUP BY error_id, name, message ORDER BY {sort} {order} - LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence + LIMIT 200) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence FROM errors GROUP BY error_id) AS time_details ON details.error_id=time_details.error_id @@ -527,23 +548,22 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F # print(main_ch_query % params) rows = ch.execute(query=main_ch_query, params=params) if len(statuses) == 0: - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """SELECT error_id, status, parent_error_id, payload, - COALESCE((SELECT TRUE - FROM public.user_favorite_errors AS fe - WHERE errors.error_id = fe.error_id - AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE errors.error_id = ve.error_id - AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed - FROM public.errors - WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", - {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), - "user_id": user_id}) - cur.execute(query=query) - statuses = cur.fetchall() + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = 
%(userId)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), + "userId": user_id}) + cur.execute(query=query) + statuses = cur.fetchall() statuses = { s["error_id"]: s for s in statuses } @@ -565,9 +585,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F r["chart"] = list(r["chart"]) for i in range(len(r["chart"])): r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]} - r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"], - end_time=data["endDate"], - density=density, neutral={"count": 0}) + r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, + end_time=data.endDate, + density=data.density, neutral={"count": 0}) offset = len(rows) rows = [r for r in rows if r["stack"] is None or (len(r["stack"]) == 0 or len(r["stack"]) > 1 @@ -593,7 +613,7 @@ def __save_stacktrace(error_id, data): def get_trace(project_id, error_id): - error = get(error_id=error_id) + error = get(error_id=error_id, family=False) if error is None: return {"errors": ["error not found"]} if error.get("source", "") != "js_exception": @@ -766,7 +786,7 @@ def format_first_stack_frame(error): def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()): with pg_client.PostgresClient() as cur: query = cur.mogrify( - """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s) + """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s) SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed FROM (SELECT root_error.error_id FROM events.errors @@ -780,7 +800,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim AND user_viewed.error_id ISNULL LIMIT 1 ) AS timed_errors;""", - {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp, + {"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp}) cur.execute(query=query) row = cur.fetchone() From ad1caa0784a84090f9b7bbd67df1474e7df2b3f1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 14 Mar 2022 17:33:34 +0100 Subject: [PATCH 22/60] feat(api): changed pagination for bookmarked sessions --- api/chalicelib/core/alerts_processor.py | 2 +- api/chalicelib/core/sessions.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index 4fd7f27ec..e4579826f 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -104,7 +104,7 @@ def Build(a): a["filter"]["endDate"] = TimeUTC.now() full_args, query_part, sort = sessions.search_query_parts( data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, - issue=None, project_id=a["projectId"], user_id=None) + issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value {query_part}""" else: diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index b213a55f6..aafa00570 100644 --- 
a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -170,7 +170,9 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status="ALL", count_only=False, issue=None): - full_args, query_part, sort = search_query_parts(data, error_status, errors_only, issue, project_id, user_id) + full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, + favorite_only=data.bookmarked, issue=issue, project_id=project_id, + user_id=user_id) if data.limit is not None and data.page is not None: full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit @@ -974,12 +976,12 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" extra_constraints.append("ufe.user_id = %(userId)s") # extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints] - if data.bookmarked and not errors_only and user_id is not None: + if favorite_only and not errors_only and user_id is not None: extra_from += """INNER JOIN (SELECT user_id, session_id FROM public.user_favorite_sessions WHERE user_id = %(userId)s) AS favorite_sessions USING (session_id)""" - elif not data.bookmarked and not errors_only and user_id is not None: + elif not favorite_only and not errors_only and user_id is not None: extra_from += """LEFT JOIN (SELECT user_id, session_id FROM public.user_favorite_sessions WHERE user_id = %(userId)s) AS favorite_sessions From 9065898980595e0b1299fce376511d47055aef17 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 14 Mar 2022 18:56:55 +0100 Subject: [PATCH 23/60] feat(api): FOSS pagination for errors --- api/chalicelib/core/errors.py | 55 +++++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index f64ea017f..b9780419d 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -465,12 +465,20 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s if data.order is not None: order = data.order extra_join = "" + params = { "startDate": data.startDate, "endDate": data.endDate, "project_id": project_id, "userId": user_id, "step_size": step_size} + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + if error_ids is not None: params["error_ids"] = tuple(error_ids) pg_sub_query.append("error_id IN %(error_ids)s") @@ -478,7 +486,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s pg_sub_query.append("ufe.user_id = %(userId)s") extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" main_pg_query = f"""\ - SELECT error_id, + SELECT full_count, + error_id, name, message, users, @@ -486,20 +495,23 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s last_occurrence, first_occurrence, chart - FROM (SELECT error_id, - name, - message, - COUNT(DISTINCT user_uuid) AS users, - COUNT(DISTINCT session_id) AS sessions, - MAX(timestamp) AS max_datetime, - MIN(timestamp) AS min_datetime - FROM events.errors - INNER JOIN public.errors AS pe USING 
(error_id) - INNER JOIN public.sessions USING (session_id) - {extra_join} - WHERE {" AND ".join(pg_sub_query)} - GROUP BY error_id, name, message - ORDER BY {sort} {order}) AS details + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s + ) AS details INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, MIN(timestamp) AS first_occurrence FROM events.errors @@ -517,16 +529,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s # print("--------------------") # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + cur.execute(cur.mogrify(main_pg_query, params)) - total = cur.rowcount + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] if flows: return {"data": {"count": total}} - row = cur.fetchone() - rows = [] - limit = 200 - while row is not None and len(rows) < limit: - rows.append(row) - row = cur.fetchone() + if total == 0: rows = [] else: @@ -552,6 +562,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s } for r in rows: + r.pop("full_count") if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] From 76314e2055003ff94c53c960aa8f805d613e5cbd Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 14 Mar 2022 19:06:58 +0100 Subject: [PATCH 24/60] feat(api): EE pagination for errors feat(api): FOSS refactored errors --- api/chalicelib/core/errors.py | 9 +++++---- ee/api/chalicelib/core/errors.py | 20 ++++++++++++++------ 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index b9780419d..4b6554c2b 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -423,6 +423,10 @@ def __get_sort_key(key): @dev.timed def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} status = status.upper() if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: return {"errors": ["invalid error status"]} @@ -447,10 +451,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, error_status=status) if len(statuses) == 0: - return {"data": { - 'total': 0, - 'errors': [] - }} + return empty_response error_ids = [e["error_id"] for e in statuses] with pg_client.PostgresClient() as cur: if data.startDate is None: diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index dc4ea17b2..f70ac873e 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -483,13 +483,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s order = "DESC" if data.order is not None: order = data.order - extra_join = "" params = { "startDate": data.startDate, "endDate": data.endDate, 
"project_id": project_id, "userId": user_id, "step_size": step_size} + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 if favorite_only: cur.execute(cur.mogrify(f"""SELECT error_id FROM public.user_favorite_errors @@ -531,9 +536,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s WHERE {" AND ".join(ch_sub_query)} GROUP BY error_id, name, message ORDER BY {sort} {order} - LIMIT 200) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence - FROM errors - GROUP BY error_id) AS time_details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details + INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence + FROM errors + GROUP BY error_id) AS time_details ON details.error_id=time_details.error_id INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, @@ -544,8 +550,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s ORDER BY timestamp) AS sub_table GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;""" - # print("--------------------") - # print(main_ch_query % params) + # print("------------") + # print(ch.client().substitute_params(main_ch_query, params)) + # print("------------") + rows = ch.execute(query=main_ch_query, params=params) if len(statuses) == 0: query = cur.mogrify( From bca45da791b42d0d29b87008f68c55ba24d8b12a Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 14 Mar 2022 19:11:55 +0100 Subject: [PATCH 25/60] change(ui) - funnels show step values --- .../Funnels/FunnelGraph/FunnelGraph.js | 41 ++++++++++++++++--- frontend/app/types/funnel.js | 9 ++-- 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js b/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js index 906843394..c110d16ab 100644 --- a/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js +++ b/frontend/app/components/Funnels/FunnelGraph/FunnelGraph.js @@ -6,9 +6,19 @@ import { connect } from 'react-redux'; import { setActiveStages } from 'Duck/funnels'; import { Styles } from '../../Dashboard/Widgets/common'; import { numberWithCommas } from 'App/utils' +import { truncate } from 'App/utils' const MIN_BAR_HEIGHT = 20; +function CustomTick(props) { + const { x, y, payload } = props; + return ( + + {payload.value} + + ); +} + function FunnelGraph(props) { const { data, activeStages, funnelId, liveFilters } = props; const [activeIndex, setActiveIndex] = useState(activeStages) @@ -118,13 +128,29 @@ function FunnelGraph(props) { ) } - const CustomTooltip = ({ active, payload, msg = '' }) => { + const CustomTooltip = (props) => { + const { payload } = props; + if (payload.length === 0) return null; + const { value, headerText } = payload[0].payload; + + // const value = payload[0].payload.value; + if (!value) return null; return ( -
-

{msg}

+
+
{headerText}
+ {value.map(i => ( +
{truncate(i, 30)}
+ ))}
- ); + ) }; + // const CustomTooltip = ({ active, payload, msg = '' }) => { + // return ( + //
+ //

{msg}

+ //
+ // ); + // }; const TEMP = {} @@ -152,7 +178,9 @@ function FunnelGraph(props) { background={'transparent'} > - {activeStages.length < 2 && 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} + {/* {activeStages.length < 2 && 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} */} + + } xAxisId={0} /> {/* { break; case "INPUT": str = 'Entered'; - break; + break; + case "CUSTOM": + str = 'Custom Event'; + break; } return `${str} ${operator}`; @@ -52,7 +55,7 @@ export default Record({ }, fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => { let _stages = stages.map((stage, index) => { - // stage.label = getRedableName(stage.type, stage.value); + stage.headerText = getRedableName(stage.type, stage.value); stage.label = `Step ${index + 1}`; return stage; }); @@ -73,7 +76,7 @@ export default Record({ ...rest, stages: _stages.length > 0 ? _stages.map((stage, index) => { if (!stage) return; - // stage.label = getRedableName(stage); + stage.headerText = getRedableName(stage); stage.label = `Step ${index + 1}`; return stage; }) : [], From 817b2dffb7a9dbebd3c72bd8a45f6f65e08d8b33 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 15 Mar 2022 12:36:02 +0100 Subject: [PATCH 26/60] chore(helm): nginx service dedicated name block Signed-off-by: rjshrjndrn --- .../openreplay/charts/nginx-ingress/templates/service.yaml | 2 +- scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml b/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml index 6c978f7f6..f20d4fc38 100644 --- a/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml +++ b/scripts/helmcharts/openreplay/charts/nginx-ingress/templates/service.yaml @@ -19,7 +19,7 @@ spec: - port: {{ .port }} targetPort: {{ .targetPort }} protocol: TCP - name: {{ .targetPort }} + name: {{ .name }} {{- end }} selector: {{- include "nginx-ingress.selectorLabels" . 
| nindent 4 }} diff --git a/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml b/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml index 6984c1938..1f7169c0d 100644 --- a/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml +++ b/scripts/helmcharts/openreplay/charts/nginx-ingress/values.yaml @@ -42,8 +42,10 @@ service: ports: - port: 80 targetPort: http + name: http - port: 443 targetPort: https + name: https ingress: enabled: false From d05b4e5b121b3fa6f836ba8b232847d07953c201 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 13:14:28 +0100 Subject: [PATCH 27/60] feat(api): transform update funnel response --- api/chalicelib/core/funnels.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index adcb1167e..e63ea6efc 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -108,6 +108,7 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r = helper.dict_to_camel_case(r) r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) + r["filter"] = helper.old_search_payload_to_flat(r["filter"]) return {"data": r} From 1045aeca0f45c69315559e9224eadd347fbe776f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 15:55:36 +0100 Subject: [PATCH 28/60] feat(api): FOSS changed search-errors feat(api): FOSS changed search-errors-payload feat(api): FOSS support search-errors-by-status without any extra query --- api/chalicelib/core/errors.py | 32 ++++++++++++++++++-------------- api/chalicelib/core/sessions.py | 17 +++++------------ api/routers/core.py | 7 ++----- api/schemas.py | 8 ++++++++ 4 files changed, 33 insertions(+), 31 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 4b6554c2b..cbecad113 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -2,7 +2,7 @@ import json import schemas from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import pg_client, helper, dev +from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.metrics_helper import __get_step_size @@ -399,7 +399,10 @@ def get_details_chart(project_id, error_id, user_id, **data): def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", endTime_arg_name="endDate", chart=False, step_size_name="step_size", project_key="project_id"): - ch_sub_query = [f"{project_key} =%(project_id)s"] + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] if time_constraint: ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", f"timestamp < %({endTime_arg_name})s"] @@ -421,15 +424,12 @@ def __get_sort_key(key): }.get(key, 'max_datetime') -@dev.timed -def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} + platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: @@ -437,8 +437,8 @@ def search(data: 
schemas.SearchErrorsSchema, project_id, user_id, flows=False, s pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", "pe.project_id=%(project_id)s"] - pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True) - pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None @@ -446,13 +446,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s data.startDate = TimeUTC.now(-30) if data.endDate is None: data.endDate = TimeUTC.now(1) - if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status) + error_status=data.status) if len(statuses) == 0: return empty_response - error_ids = [e["error_id"] for e in statuses] + error_ids = [e["errorId"] for e in statuses] with pg_client.PostgresClient() as cur: if data.startDate is None: data.startDate = TimeUTC.now(-7) @@ -473,6 +474,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s "project_id": project_id, "userId": user_id, "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status if data.limit is not None and data.page is not None: params["errors_offset"] = (data.page - 1) * data.limit params["errors_limit"] = data.limit @@ -483,7 +487,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s if error_ids is not None: params["error_ids"] = tuple(error_ids) pg_sub_query.append("error_id IN %(error_ids)s") - if favorite_only: + if data.bookmarked: pg_sub_query.append("ufe.user_id = %(userId)s") extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" main_pg_query = f"""\ @@ -522,7 +526,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s COUNT(session_id) AS count FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id) + FROM events.errors WHERE {" AND ".join(pg_sub_query_chart)} ) AS sessions ON (TRUE) GROUP BY timestamp diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index aafa00570..45a1bd52c 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -235,24 +235,16 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e # print("--------------------") cur.execute(main_query) + if errors_only: + return helper.list_to_camel_case(cur.fetchall()) + sessions = cur.fetchone() if count_only: return helper.dict_to_camel_case(sessions) total = sessions["count"] sessions = sessions["sessions"] - # sessions = [] - # total = cur.rowcount - # row = cur.fetchone() - # limit = 200 - # while row is not None and len(sessions) < limit: - # if row.get("favorite"): - 
# limit += 1 - # sessions.append(row) - # row = cur.fetchone() - if errors_only: - return sessions if data.group_by_user: for i, s in enumerate(sessions): sessions[i] = {**s.pop("last_session")[0], **s} @@ -969,9 +961,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr if errors_only: extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") + extra_constraints.append("ser.project_id = %(project_id)s") if error_status != "ALL": extra_constraints.append("ser.status = %(error_status)s") - full_args["status"] = error_status.lower() + full_args["error_status"] = error_status.lower() if favorite_only: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" extra_constraints.append("ufe.user_id = %(userId)s") diff --git a/api/routers/core.py b/api/routers/core.py index 999222c3e..8d3f3ddf8 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -902,12 +902,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...), @app.post('/{projectId}/errors/search', tags=['errors']) -def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False, - data: schemas.SearchErrorsSchema = Body(...), +def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(favorite, str): - favorite = True if len(favorite) == 0 else False - return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite) + return errors.search(data, projectId, user_id=context.user_id) @app.get('/{projectId}/errors/stats', tags=['errors']) diff --git a/api/schemas.py b/api/schemas.py index 767a53866..76fc754bb 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -690,8 +690,16 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): rangeValue: Optional[str] = Field(None) +class ErrorStatus(str, Enum): + all = 'all' + unresolved = 'unresolved' + resolved = 'resolved' + ignored = 'ignored' + + class SearchErrorsSchema(SessionsSearchPayloadSchema): density: Optional[int] = Field(7) + status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) class MetricPayloadSchema(BaseModel): From 3cd656c752ad8ea23e00ed54f93a41ae68efc120 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 17:00:13 +0100 Subject: [PATCH 29/60] feat(api): EE changed search-errors --- ee/api/chalicelib/core/errors.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index f70ac873e..414c20e13 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -444,14 +444,11 @@ def __get_sort_key(key): }.get(key, 'max_datetime') -def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False): +def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} - status = status.upper() - if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']: - return {"errors": ["invalid error status"]} platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: @@ -460,17 +457,19 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s ch_sub_query.append("source 
='js_exception'") statuses = [] error_ids = None - if data.startDate is None: + # Clickhouse keeps data for the past month only, so no need to search beyond that + if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31): data.startDate = TimeUTC.now(-30) if data.endDate is None: data.endDate = TimeUTC.now(1) - if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL": + if len(data.events) > 0 or len(data.filters) > 0 or data.status != "ALL": + print("-- searching for sessions before errors") # if favorite_only=True search for sessions associated with favorite_error statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, - error_status=status, favorite_only=favorite_only) + error_status=data.status) if len(statuses) == 0: return empty_response - error_ids = [e["error_id"] for e in statuses] + error_ids = [e["errorId"] for e in statuses] with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur: if data.startDate is None: data.startDate = TimeUTC.now(-7) @@ -495,7 +494,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s else: params["errors_offset"] = 0 params["errors_limit"] = 200 - if favorite_only: + if data.bookmarked: cur.execute(cur.mogrify(f"""SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s @@ -573,7 +572,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s cur.execute(query=query) statuses = cur.fetchall() statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: From 9a6c9700ebdd12a952c6e6c083da1b00c3ae4aee Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 17:14:22 +0100 Subject: [PATCH 30/60] feat(api): FOSS changed search-error feat(api): EE changed search-error --- api/chalicelib/core/errors.py | 6 +++--- ee/api/chalicelib/core/errors.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index cbecad113..4d568dc59 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -561,16 +561,16 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), "user_id": user_id}) cur.execute(query=query) - statuses = cur.fetchall() + statuses = helper.list_to_camel_case(cur.fetchall()) statuses = { - s["error_id"]: s for s in statuses + s["errorId"]: s for s in statuses } for r in rows: r.pop("full_count") if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] r["viewed"] = statuses[r["error_id"]]["viewed"] r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 414c20e13..396c61d83 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -578,7 +578,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): for r in rows: if r["error_id"] in statuses: r["status"] = statuses[r["error_id"]]["status"] - r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] r["favorite"] = statuses[r["error_id"]]["favorite"] 
r["viewed"] = statuses[r["error_id"]]["viewed"]
                r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]

From bae3a3c19ebe713a8b5b57cc047bac0f048165c6 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 15 Mar 2022 17:28:58 +0100
Subject: [PATCH 31/60] feat(api): EE changed search-error

---
 ee/api/chalicelib/core/errors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
index 396c61d83..fa99f1e44 100644
--- a/ee/api/chalicelib/core/errors.py
+++ b/ee/api/chalicelib/core/errors.py
@@ -462,7 +462,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
         data.startDate = TimeUTC.now(-30)
     if data.endDate is None:
         data.endDate = TimeUTC.now(1)
-    if len(data.events) > 0 or len(data.filters) > 0 or data.status != "ALL":
+    if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
         print("-- searching for sessions before errors")
         # if favorite_only=True search for sessions associated with favorite_error
         statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,

From 5946af857a400597521766d2f81ff92f3d8237f9 Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Tue, 15 Mar 2022 17:32:50 +0100
Subject: [PATCH 33/60] feat(api): EE changed search-error

---
 ee/api/chalicelib/core/errors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
index fa99f1e44..88710284b 100644
--- a/ee/api/chalicelib/core/errors.py
+++ b/ee/api/chalicelib/core/errors.py
@@ -570,7 +570,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
                     {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                      "userId": user_id})
             cur.execute(query=query)
-            statuses = cur.fetchall()
+            statuses = helper.list_to_camel_case(cur.fetchall())
             statuses = {
                 s["errorId"]: s for s in statuses
             }

From 7168e660cc8fa66a46a26667d15a7a00c49cfaad Mon Sep 17 00:00:00 2001
From: Shekar Siri
Date: Tue, 15 Mar 2022 17:43:06 +0100
Subject: [PATCH 34/60] feat(ui) - errors pagination

---
 frontend/app/components/Errors/Errors.js    |  72 +++------
 frontend/app/components/Errors/List/List.js | 139 ++++++++++--------
 .../ui/DropdownPlain/DropdownPlain.js       |   2 +-
 frontend/app/duck/errors.js                 |  72 ++++++---
 frontend/app/duck/search.js                 |   2 +-
 frontend/app/types/errorInfo.js             |   1 +
frontend/app/utils.js | 1 - 7 files changed, 156 insertions(+), 133 deletions(-) diff --git a/frontend/app/components/Errors/Errors.js b/frontend/app/components/Errors/Errors.js index 4eb671cf5..c29a5f200 100644 --- a/frontend/app/components/Errors/Errors.js +++ b/frontend/app/components/Errors/Errors.js @@ -1,23 +1,18 @@ import { connect } from 'react-redux'; import withSiteIdRouter from 'HOCs/withSiteIdRouter'; import withPermissions from 'HOCs/withPermissions' -import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo"; -import { getRE } from 'App/utils'; -import { fetchBookmarks } from "Duck/errors"; +import { UNRESOLVED, RESOLVED, IGNORED, BOOKMARK } from "Types/errorInfo"; +import { fetchBookmarks, editOptions } from "Duck/errors"; import { applyFilter } from 'Duck/filters'; import { fetchList as fetchSlackList } from 'Duck/integrations/slack'; import { errors as errorsRoute, isRoute } from "App/routes"; -import EventFilter from 'Components/BugFinder/EventFilter'; import DateRange from 'Components/BugFinder/DateRange'; import withPageTitle from 'HOCs/withPageTitle'; -import { SavedSearchList } from 'UI'; - import List from './List/List'; import ErrorInfo from './Error/ErrorInfo'; import Header from './Header'; import SideMenuSection from './SideMenu/SideMenuSection'; -import SideMenuHeader from './SideMenu/SideMenuHeader'; import SideMenuDividedItem from './SideMenu/SideMenuDividedItem'; const ERRORS_ROUTE = errorsRoute(); @@ -39,44 +34,26 @@ function getStatusLabel(status) { @withSiteIdRouter @connect(state => ({ list: state.getIn([ "errors", "list" ]), + status: state.getIn([ "errors", "options", "status" ]), }), { fetchBookmarks, applyFilter, fetchSlackList, + editOptions, }) @withPageTitle("Errors - OpenReplay") export default class Errors extends React.PureComponent { - state = { - status: UNRESOLVED, - bookmarksActive: false, - currentList: this.props.list.filter(e => e.status === UNRESOLVED), - filter: '', + constructor(props) { + super(props) + this.state = { + filter: '', + } } componentDidMount() { this.props.fetchSlackList(); // Delete after implementing cache } - onFilterChange = ({ target: { value } }) => this.setState({ filter: value }) - - componentDidUpdate(prevProps, prevState) { - const { bookmarksActive, status, filter } = this.state; - const { list } = this.props; - if (prevProps.list !== list - || prevState.status !== status - || prevState.bookmarksActive !== bookmarksActive - || prevState.filter !== filter) { - const unfiltered = bookmarksActive - ? 
list - : list.filter(e => e.status === status); - const filterRE = getRE(filter); - this.setState({ - currentList: unfiltered - .filter(e => filterRE.test(e.name) || filterRE.test(e.message)), - }) - } - } - ensureErrorsPage() { const { history } = this.props; if (!isRoute(ERRORS_ROUTE, history.location.pathname)) { @@ -85,22 +62,11 @@ export default class Errors extends React.PureComponent { } onStatusItemClick = ({ key }) => { - if (this.state.bookmarksActive) { - this.props.applyFilter(); - } - this.setState({ - status: key, - bookmarksActive: false, - }); - this.ensureErrorsPage(); + this.props.editOptions({ status: key }); } onBookmarksClick = () => { - this.setState({ - bookmarksActive: true, - }); - this.props.fetchBookmarks(); - this.ensureErrorsPage(); + this.props.editOptions({ status: BOOKMARK }); } @@ -110,8 +76,9 @@ export default class Errors extends React.PureComponent { match: { params: { errorId } }, + status, + list, } = this.props; - const { status, bookmarksActive, currentList } = this.state; return (
@@ -137,14 +104,14 @@ export default class Errors extends React.PureComponent { icon: "ban", label: getStatusLabel(IGNORED), active: status === IGNORED, - } + } ]} />
@@ -154,8 +121,8 @@ export default class Errors extends React.PureComponent { <>
Seen in @@ -164,12 +131,11 @@ export default class Errors extends React.PureComponent {
: - + }
diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js index cb0ffd55a..2492782c8 100644 --- a/frontend/app/components/Errors/List/List.js +++ b/frontend/app/components/Errors/List/List.js @@ -1,53 +1,62 @@ import cn from 'classnames'; import { connect } from 'react-redux'; import { Set, List as ImmutableList } from "immutable"; -import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain } from 'UI'; -import { merge, resolve, unresolve, ignore, updateCurrentPage } from "Duck/errors"; +import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain, Pagination } from 'UI'; +import { merge, resolve, unresolve, ignore, updateCurrentPage, editOptions } from "Duck/errors"; import { applyFilter } from 'Duck/filters'; import { IGNORED, RESOLVED, UNRESOLVED } from 'Types/errorInfo'; import SortDropdown from 'Components/BugFinder/Filters/SortDropdown'; import Divider from 'Components/Errors/ui/Divider'; import ListItem from './ListItem/ListItem'; +import { debounce } from 'App/utils'; const PER_PAGE = 5; -const DEFAULT_SORT = 'lastOccurrence'; -const DEFAULT_ORDER = 'desc'; const sortOptionsMap = { 'lastOccurrence-desc': 'Last Occurrence', - 'firstOccurrence-desc': 'First Occurrence', - 'sessions-asc': 'Sessions Ascending', - 'sessions-desc': 'Sessions Descending', - 'users-asc': 'Users Ascending', - 'users-desc': 'Users Descending', + 'firstOccurrence-desc': 'First Occurrence', + 'sessions-asc': 'Sessions Ascending', + 'sessions-desc': 'Sessions Descending', + 'users-asc': 'Users Ascending', + 'users-desc': 'Users Descending', }; const sortOptions = Object.entries(sortOptionsMap) .map(([ value, text ]) => ({ value, text })); - @connect(state => ({ loading: state.getIn([ "errors", "loading" ]), resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) || state.getIn(["errors", "unresolve", "loading"]), ignoreLoading: state.getIn([ "errors", "ignore", "loading" ]), mergeLoading: state.getIn([ "errors", "merge", "loading" ]), - currentPage: state.getIn(["errors", "currentPage"]), + currentPage: state.getIn(["errors", "currentPage"]), + total: state.getIn([ 'errors', 'totalCount' ]), + sort: state.getIn([ 'errors', 'options', 'sort' ]), + order: state.getIn([ 'errors', 'options', 'order' ]), + query: state.getIn([ "errors", "options", "query" ]), }), { merge, resolve, unresolve, ignore, applyFilter, - updateCurrentPage, + updateCurrentPage, + editOptions, }) export default class List extends React.PureComponent { - state = { - checkedAll: false, - checkedIds: Set(), - sort: {} + constructor(props) { + super(props) + this.state = { + checkedAll: false, + checkedIds: Set(), + query: props.query, + } + this.debounceFetch = debounce(this.props.editOptions, 1000); } - + componentDidMount() { - this.props.applyFilter({ sort: DEFAULT_SORT, order: DEFAULT_ORDER, events: ImmutableList(), filters: ImmutableList() }); + if (this.props.list.size === 0) { + this.props.applyFilter({ }); + } } check = ({ errorId }) => { @@ -111,8 +120,14 @@ export default class List extends React.PureComponent { writeOption = (e, { name, value }) => { const [ sort, order ] = value.split('-'); - const sign = order === 'desc' ? 
-1 : 1; - this.setState({ sort: { sort, order }}) + if (name === 'sort') { + this.props.editOptions({ sort, order }); + } + } + + onQueryChange = (e, { value }) => { + this.setState({ query: value }); + this.debounceFetch({ query: value }); } render() { @@ -123,19 +138,18 @@ export default class List extends React.PureComponent { ignoreLoading, resolveToggleLoading, mergeLoading, - onFilterChange, - currentPage, + currentPage, + total, + sort, + order, } = this.props; const { checkedAll, checkedIds, - sort + query, } = this.state; const someLoading = loading || ignoreLoading || resolveToggleLoading || mergeLoading; const currentCheckedIds = this.currentCheckedIds(); - const displayedCount = Math.min(currentPage * PER_PAGE, list.size); - let _list = sort.sort ? list.sortBy(i => i[sort.sort]) : list; - _list = sort.order === 'desc' ? _list.reverse() : _list; return (
@@ -182,33 +196,35 @@ export default class List extends React.PureComponent { }
- Sort By + Sort By - + -
-
- - - - { _list.take(displayedCount).map(e => - <> + className="input-small ml-3" + placeholder="Filter by Name or Message" + icon="search" + iconPosition="left" + name="filter" + onChange={ this.onQueryChange } + value={query} + /> + + + + + + { list.map(e => +
- +
)} - -
-
+
+ this.props.updateCurrentPage(page)} + limit={PER_PAGE} + debounceRequest={500} + /> +
+
+
); } diff --git a/frontend/app/components/ui/DropdownPlain/DropdownPlain.js b/frontend/app/components/ui/DropdownPlain/DropdownPlain.js index 389b75b93..8f11a14fb 100644 --- a/frontend/app/components/ui/DropdownPlain/DropdownPlain.js +++ b/frontend/app/components/ui/DropdownPlain/DropdownPlain.js @@ -21,7 +21,7 @@ function DropdownPlain({ name, label, options, onChange, defaultValue, wrapperSt options={ options } onChange={ onChange } defaultValue={ defaultValue || options[ 0 ].value } - icon={null} + // icon={null} disabled={disabled} icon={ } /> diff --git a/frontend/app/duck/errors.js b/frontend/app/duck/errors.js index 9e7b552f2..1b099d6d5 100644 --- a/frontend/app/duck/errors.js +++ b/frontend/app/duck/errors.js @@ -1,13 +1,18 @@ import { List, Map } from 'immutable'; import { clean as cleanParams } from 'App/api_client'; -import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED } from 'Types/errorInfo'; +import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED, BOOKMARK } from 'Types/errorInfo'; import { createFetch, fetchListType, fetchType } from './funcTools/crud'; import { createRequestReducer, ROOT_KEY } from './funcTools/request'; import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools'; +import { reduceThenFetchResource } from './search' const name = "error"; const idKey = "errorId"; +const PER_PAGE = 5; +const DEFAULT_SORT = 'lastOccurrence'; +const DEFAULT_ORDER = 'desc'; +const EDIT_OPTIONS = `${name}/EDIT_OPTIONS`; const FETCH_LIST = fetchListType(name); const FETCH = fetchType(name); const FETCH_NEW_ERRORS_COUNT = fetchType('errors/FETCH_NEW_ERRORS_COUNT'); @@ -18,6 +23,7 @@ const MERGE = "errors/MERGE"; const TOGGLE_FAVORITE = "errors/TOGGLE_FAVORITE"; const FETCH_TRACE = "errors/FETCH_TRACE"; const UPDATE_CURRENT_PAGE = "errors/UPDATE_CURRENT_PAGE"; +const UPDATE_KEY = `${name}/UPDATE_KEY`; function chartWrapper(chart = []) { return chart.map(point => ({ ...point, count: Math.max(point.count, 0) })); @@ -35,13 +41,23 @@ const initialState = Map({ instanceTrace: List(), stats: Map(), sourcemapUploaded: true, - currentPage: 1, + currentPage: 1, + options: Map({ + sort: DEFAULT_SORT, + order: DEFAULT_ORDER, + status: UNRESOLVED, + query: '', + }), + // sort: DEFAULT_SORT, + // order: DEFAULT_ORDER, }); function reducer(state = initialState, action = {}) { let updError; switch (action.type) { + case EDIT_OPTIONS: + return state.mergeIn(["options"], action.instance); case success(FETCH): return state.set("instance", ErrorInfo(action.data)); case success(FETCH_TRACE): @@ -69,8 +85,10 @@ function reducer(state = initialState, action = {}) { return state.update("list", list => list.filter(e => !ids.includes(e.errorId))); case success(FETCH_NEW_ERRORS_COUNT): return state.set('stats', action.data); - case UPDATE_CURRENT_PAGE: - return state.set('currentPage', action.page); + case UPDATE_KEY: + return state.set(action.key, action.value); + case UPDATE_CURRENT_PAGE: + return state.set('currentPage', action.page); } return state; } @@ -106,14 +124,31 @@ export function fetchTrace(id) { } } -export function fetchList(params = {}, clear = false) { - return { - types: array(FETCH_LIST), - call: client => client.post('/errors/search', params), - clear, - params: cleanParams(params), - }; -} +export const fetchList = (params = {}, clear = false) => (dispatch, getState) => { + params.page = getState().getIn(['errors', 'currentPage']); + params.limit = PER_PAGE; + + const options = getState().getIn(['errors', 'options']); + if (options.get("status") === 
BOOKMARK) { + options.bookmarked = true; + } + + return dispatch({ + types: array(FETCH_LIST), + call: client => client.post('/errors/search', { ...params, ...options }), + clear, + params: cleanParams(params), + }); +}; + +// export function fetchList(params = {}, clear = false) { +// return { +// types: array(FETCH_LIST), +// call: client => client.post('/errors/search', params), +// clear, +// params: cleanParams(params), +// }; +// } export function fetchBookmarks() { return { @@ -169,9 +204,12 @@ export function fetchNewErrorsCount(params = {}) { } } -export function updateCurrentPage(page) { - return { - type: 'errors/UPDATE_CURRENT_PAGE', +export const updateCurrentPage = reduceThenFetchResource((page) => ({ + type: UPDATE_CURRENT_PAGE, page, - }; -} +})); + +export const editOptions = reduceThenFetchResource((instance) => ({ + type: EDIT_OPTIONS, + instance +})); \ No newline at end of file diff --git a/frontend/app/duck/search.js b/frontend/app/duck/search.js index f4e480b48..9106227bb 100644 --- a/frontend/app/duck/search.js +++ b/frontend/app/duck/search.js @@ -126,7 +126,7 @@ export const filterMap = ({category, value, key, operator, sourceOperator, sourc filters: filters ? filters.map(filterMap) : [], }); -const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => { +export const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => { dispatch(actionCreator(...args)); const filter = getState().getIn([ 'search', 'instance']).toData(); diff --git a/frontend/app/types/errorInfo.js b/frontend/app/types/errorInfo.js index efcb5154e..364fa8e65 100644 --- a/frontend/app/types/errorInfo.js +++ b/frontend/app/types/errorInfo.js @@ -5,6 +5,7 @@ import Session from './session'; export const RESOLVED = "resolved"; export const UNRESOLVED = "unresolved"; export const IGNORED = "ignored"; +export const BOOKMARK = "bookmark"; function getStck0InfoString(stack) { diff --git a/frontend/app/utils.js b/frontend/app/utils.js index 795f02d49..5ea05633c 100644 --- a/frontend/app/utils.js +++ b/frontend/app/utils.js @@ -237,6 +237,5 @@ export const isGreaterOrEqualVersion = (version, compareTo) => { export const sliceListPerPage = (list, page, perPage = 10) => { const start = page * perPage; const end = start + perPage; - console.log(start, end) return list.slice(start, end); } \ No newline at end of file From 4946dd2036b2f3385539a997c39f1195e38ab185 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 15 Mar 2022 17:44:09 +0100 Subject: [PATCH 35/60] feat(ui) - errors pagination --- frontend/app/components/Errors/List/List.js | 2 +- frontend/app/duck/errors.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js index 2492782c8..5fb8e49f6 100644 --- a/frontend/app/components/Errors/List/List.js +++ b/frontend/app/components/Errors/List/List.js @@ -10,7 +10,7 @@ import Divider from 'Components/Errors/ui/Divider'; import ListItem from './ListItem/ListItem'; import { debounce } from 'App/utils'; -const PER_PAGE = 5; +const PER_PAGE = 10; const sortOptionsMap = { 'lastOccurrence-desc': 'Last Occurrence', 'firstOccurrence-desc': 'First Occurrence', diff --git a/frontend/app/duck/errors.js b/frontend/app/duck/errors.js index 1b099d6d5..0d2dede92 100644 --- a/frontend/app/duck/errors.js +++ b/frontend/app/duck/errors.js @@ -8,7 +8,7 @@ import { reduceThenFetchResource } from './search' const name = "error"; const idKey = "errorId"; -const PER_PAGE 
= 5; +const PER_PAGE = 10; const DEFAULT_SORT = 'lastOccurrence'; const DEFAULT_ORDER = 'desc'; From 3ed1773d39a8b7b08f1e46dd72068939dd9dbbfe Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 18:03:15 +0100 Subject: [PATCH 36/60] feat(api): EE changed search-errors --- ee/api/chalicelib/core/errors.py | 198 ++++++++++++++++++++++++++++++- 1 file changed, 195 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 88710284b..2705a1115 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -3,7 +3,7 @@ import json import schemas from chalicelib.core import dashboard from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import ch_client +from chalicelib.utils import ch_client, metrics_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -424,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if time_constraint: ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"datetime < toDateTime(%({endTime_arg_name})s/1000)"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -444,11 +444,203 @@ def __get_sort_key(key): }.get(key, 'max_datetime') +def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate", + endTime_arg_name="endDate", chart=False, step_size_name="step_size", + project_key="project_id"): + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] + if time_constraint: + ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", + f"timestamp < %({endTime_arg_name})s"] + if chart: + ch_sub_query += [f"timestamp >= generated_timestamp", + f"timestamp < generated_timestamp + %({step_size_name})s"] + if platform == schemas.PlatformType.mobile: + ch_sub_query.append("user_device_type = 'mobile'") + elif platform == schemas.PlatformType.desktop: + ch_sub_query.append("user_device_type = 'desktop'") + return ch_sub_query + + def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} + + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id") + pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", + "pe.project_id=%(project_id)s"] + pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart.append("errors.error_id =details.error_id") + statuses = [] + error_ids = None + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") + # if favorite_only=True search for sessions associated with favorite_error + statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, + error_status=data.status) + if len(statuses) == 0: + 
return empty_response + error_ids = [e["errorId"] for e in statuses] + with pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1) + sort = __get_sort_key('datetime') + if data.sort is not None: + sort = __get_sort_key(data.sort) + order = "DESC" + if data.order is not None: + order = data.order + extra_join = "" + + params = { + "startDate": data.startDate, + "endDate": data.endDate, + "project_id": project_id, + "userId": user_id, + "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + + if error_ids is not None: + params["error_ids"] = tuple(error_ids) + pg_sub_query.append("error_id IN %(error_ids)s") + if data.bookmarked: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + main_pg_query = f"""\ + SELECT full_count, + error_id, + name, + message, + users, + sessions, + last_occurrence, + first_occurrence, + chart + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s + ) AS details + INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, + MIN(timestamp) AS first_occurrence + FROM events.errors + WHERE errors.error_id = details.error_id) AS time_details ON (TRUE) + INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY timestamp + ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);""" + + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + + cur.execute(cur.mogrify(main_pg_query, params)) + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] + if flows: + return {"data": {"count": total}} + + if total == 0: + rows = [] + else: + if len(statuses) == 0: + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": 
tuple([r["error_id"] for r in rows]), + "user_id": user_id}) + cur.execute(query=query) + statuses = helper.list_to_camel_case(cur.fetchall()) + statuses = { + s["errorId"]: s for s in statuses + } + + for r in rows: + r.pop("full_count") + if r["error_id"] in statuses: + r["status"] = statuses[r["error_id"]]["status"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] + r["favorite"] = statuses[r["error_id"]]["favorite"] + r["viewed"] = statuses[r["error_id"]]["viewed"] + r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] + else: + r["status"] = "untracked" + r["parent_error_id"] = None + r["favorite"] = False + r["viewed"] = False + r["stack"] = None + + offset = len(rows) + rows = [r for r in rows if r["stack"] is None + or (len(r["stack"]) == 0 or len(r["stack"]) > 1 + or len(r["stack"]) > 0 + and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))] + offset -= len(rows) + return { + "data": { + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) + } + } + + +# refactor this function after clickhouse structure changes +def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: From eb760d0e13e99f6fe4b49ab4fceaf00ad1db7d19 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 18:03:46 +0100 Subject: [PATCH 37/60] feat(api): EE changed search-errors --- ee/api/chalicelib/core/errors.py | 198 ++++++++++++++++++++++++++++++- 1 file changed, 195 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 88710284b..2705a1115 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -3,7 +3,7 @@ import json import schemas from chalicelib.core import dashboard from chalicelib.core import sourcemaps, sessions -from chalicelib.utils import ch_client +from chalicelib.utils import ch_client, metrics_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -424,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if time_constraint: ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"datetime < toDateTime(%({endTime_arg_name})s/1000)"] - if platform == 'mobile': + if platform == schemas.PlatformType.mobile: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == 'desktop': + elif platform == schemas.PlatformType.desktop: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -444,11 +444,203 @@ def __get_sort_key(key): }.get(key, 'max_datetime') +def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate", + endTime_arg_name="endDate", chart=False, step_size_name="step_size", + project_key="project_id"): + if project_key is None: + ch_sub_query = [] + else: + ch_sub_query = [f"{project_key} =%(project_id)s"] + if time_constraint: + ch_sub_query += [f"timestamp >= %({startTime_arg_name})s", + f"timestamp < %({endTime_arg_name})s"] + if chart: + ch_sub_query += [f"timestamp >= generated_timestamp", + f"timestamp < generated_timestamp + %({step_size_name})s"] + if platform == schemas.PlatformType.mobile: + ch_sub_query.append("user_device_type = 'mobile'") + elif platform == schemas.PlatformType.desktop: + 
ch_sub_query.append("user_device_type = 'desktop'") + return ch_sub_query + + def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, 'errors': [] }} + + platform = None + for f in data.filters: + if f.type == schemas.FilterType.platform and len(f.value) > 0: + platform = f.value[0] + pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id") + pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'", + "pe.project_id=%(project_id)s"] + pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None) + # pg_sub_query_chart.append("source ='js_exception'") + pg_sub_query_chart.append("errors.error_id =details.error_id") + statuses = [] + error_ids = None + if data.startDate is None: + data.startDate = TimeUTC.now(-30) + if data.endDate is None: + data.endDate = TimeUTC.now(1) + if len(data.events) > 0 or len(data.filters) > 0: + print("-- searching for sessions before errors") + # if favorite_only=True search for sessions associated with favorite_error + statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True, + error_status=data.status) + if len(statuses) == 0: + return empty_response + error_ids = [e["errorId"] for e in statuses] + with pg_client.PostgresClient() as cur: + if data.startDate is None: + data.startDate = TimeUTC.now(-7) + if data.endDate is None: + data.endDate = TimeUTC.now() + step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1) + sort = __get_sort_key('datetime') + if data.sort is not None: + sort = __get_sort_key(data.sort) + order = "DESC" + if data.order is not None: + order = data.order + extra_join = "" + + params = { + "startDate": data.startDate, + "endDate": data.endDate, + "project_id": project_id, + "userId": user_id, + "step_size": step_size} + if data.status != schemas.ErrorStatus.all: + pg_sub_query.append("status = %(error_status)s") + params["error_status"] = data.status + if data.limit is not None and data.page is not None: + params["errors_offset"] = (data.page - 1) * data.limit + params["errors_limit"] = data.limit + else: + params["errors_offset"] = 0 + params["errors_limit"] = 200 + + if error_ids is not None: + params["error_ids"] = tuple(error_ids) + pg_sub_query.append("error_id IN %(error_ids)s") + if data.bookmarked: + pg_sub_query.append("ufe.user_id = %(userId)s") + extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" + main_pg_query = f"""\ + SELECT full_count, + error_id, + name, + message, + users, + sessions, + last_occurrence, + first_occurrence, + chart + FROM (SELECT COUNT(details) OVER () AS full_count, details.* + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_uuid) AS users, + COUNT(DISTINCT session_id) AS sessions, + MAX(timestamp) AS max_datetime, + MIN(timestamp) AS min_datetime + FROM events.errors + INNER JOIN public.errors AS pe USING (error_id) + INNER JOIN public.sessions USING (session_id) + {extra_join} + WHERE {" AND ".join(pg_sub_query)} + GROUP BY error_id, name, message + ORDER BY {sort} {order}) AS details + LIMIT %(errors_limit)s OFFSET %(errors_offset)s + ) AS details + INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence, + MIN(timestamp) AS first_occurrence + FROM events.errors + WHERE errors.error_id = details.error_id) AS time_details ON (TRUE) + INNER JOIN LATERAL (SELECT 
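                        -- fold the per-bucket session counts into one JSON array per error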
jsonb_agg(chart_details) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY timestamp + ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);""" + + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + + cur.execute(cur.mogrify(main_pg_query, params)) + rows = cur.fetchall() + total = 0 if len(rows) == 0 else rows[0]["full_count"] + if flows: + return {"data": {"count": total}} + + if total == 0: + rows = [] + else: + if len(statuses) == 0: + query = cur.mogrify( + """SELECT error_id, status, parent_error_id, payload, + COALESCE((SELECT TRUE + FROM public.user_favorite_errors AS fe + WHERE errors.error_id = fe.error_id + AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite, + COALESCE((SELECT TRUE + FROM public.user_viewed_errors AS ve + WHERE errors.error_id = ve.error_id + AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed + FROM public.errors + WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""", + {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]), + "user_id": user_id}) + cur.execute(query=query) + statuses = helper.list_to_camel_case(cur.fetchall()) + statuses = { + s["errorId"]: s for s in statuses + } + + for r in rows: + r.pop("full_count") + if r["error_id"] in statuses: + r["status"] = statuses[r["error_id"]]["status"] + r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"] + r["favorite"] = statuses[r["error_id"]]["favorite"] + r["viewed"] = statuses[r["error_id"]]["viewed"] + r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"] + else: + r["status"] = "untracked" + r["parent_error_id"] = None + r["favorite"] = False + r["viewed"] = False + r["stack"] = None + + offset = len(rows) + rows = [r for r in rows if r["stack"] is None + or (len(r["stack"]) == 0 or len(r["stack"]) > 1 + or len(r["stack"]) > 0 + and (r["message"].lower() != "script error." 
or len(r["stack"][0]["absPath"]) > 0))] + offset -= len(rows) + return { + "data": { + 'total': total - offset, + 'errors': helper.list_to_camel_case(rows) + } + } + + +# refactor this function after clickhouse structure changes +def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): + empty_response = {"data": { + 'total': 0, + 'errors': [] + }} platform = None for f in data.filters: if f.type == schemas.FilterType.platform and len(f.value) > 0: From 43701dd132985c5745162476fa851848f25f0482 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 18:46:19 +0100 Subject: [PATCH 38/60] feat(api): FOSS search-errors by name and message feat(api): EE search-errors by name and message --- api/chalicelib/core/errors.py | 8 ++++++-- api/chalicelib/core/sessions.py | 6 +++--- api/schemas.py | 1 + ee/api/chalicelib/core/errors.py | 10 +++++++--- 4 files changed, 17 insertions(+), 8 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 4d568dc59..c1f5633de 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -490,8 +490,12 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): if data.bookmarked: pg_sub_query.append("ufe.user_id = %(userId)s") extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - main_pg_query = f"""\ - SELECT full_count, + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, error_id, name, message, diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 45a1bd52c..074629f97 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -169,7 +169,7 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status="ALL", count_only=False, issue=None): + error_status=schemas.ErrorStatus.all, count_only=False, issue=None): full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, favorite_only=data.bookmarked, issue=issue, project_id=project_id, user_id=user_id) @@ -962,9 +962,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") extra_constraints.append("ser.project_id = %(project_id)s") - if error_status != "ALL": + if error_status != schemas.ErrorStatus.all: extra_constraints.append("ser.status = %(error_status)s") - full_args["error_status"] = error_status.lower() + full_args["error_status"] = error_status if favorite_only: extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" extra_constraints.append("ufe.user_id = %(userId)s") diff --git a/api/schemas.py b/api/schemas.py index 76fc754bb..1d0e18dfb 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -700,6 +700,7 @@ class ErrorStatus(str, Enum): class SearchErrorsSchema(SessionsSearchPayloadSchema): density: Optional[int] = Field(7) status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) + query: Optional[str] = Field(default=None) 
class MetricPayloadSchema(BaseModel): diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 2705a1115..407e94052 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -530,8 +530,12 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): if data.bookmarked: pg_sub_query.append("ufe.user_id = %(userId)s") extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - main_pg_query = f"""\ - SELECT full_count, + if data.query is not None and len(data.query) > 0: + pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") + params["error_query"] = helper.values_for_operator(value=data.query, + op=schemas.SearchEventOperator._contains) + + main_pg_query = f"""SELECT full_count, error_id, name, message, @@ -635,7 +639,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): } -# refactor this function after clickhouse structure changes +# refactor this function after clickhouse structure changes (missing search by query) def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False): empty_response = {"data": { 'total': 0, From 25239486163dbc4e7a1511dcde409d3207314009 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 15 Mar 2022 19:12:15 +0100 Subject: [PATCH 39/60] feat(ui) - errors pagination --- frontend/app/components/Errors/List/List.js | 4 ++-- frontend/app/duck/errors.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js index 5fb8e49f6..2fa91c5e5 100644 --- a/frontend/app/components/Errors/List/List.js +++ b/frontend/app/components/Errors/List/List.js @@ -12,8 +12,8 @@ import { debounce } from 'App/utils'; const PER_PAGE = 10; const sortOptionsMap = { - 'lastOccurrence-desc': 'Last Occurrence', - 'firstOccurrence-desc': 'First Occurrence', + 'occurrence-desc': 'Last Occurrence', + 'occurrence-desc': 'First Occurrence', 'sessions-asc': 'Sessions Ascending', 'sessions-desc': 'Sessions Descending', 'users-asc': 'Users Ascending', diff --git a/frontend/app/duck/errors.js b/frontend/app/duck/errors.js index 0d2dede92..1f41f823a 100644 --- a/frontend/app/duck/errors.js +++ b/frontend/app/duck/errors.js @@ -9,7 +9,7 @@ import { reduceThenFetchResource } from './search' const name = "error"; const idKey = "errorId"; const PER_PAGE = 10; -const DEFAULT_SORT = 'lastOccurrence'; +const DEFAULT_SORT = 'occurrence'; const DEFAULT_ORDER = 'desc'; const EDIT_OPTIONS = `${name}/EDIT_OPTIONS`; From 407666e755161fca42adcd68726047c2c012cb3e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 19:13:20 +0100 Subject: [PATCH 40/60] feat(api): FOSS search-errors sort feat(api): EE search-errors sort --- api/chalicelib/core/errors.py | 6 +++--- api/schemas.py | 9 ++++++++- ee/api/chalicelib/core/errors.py | 6 +++--- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index c1f5633de..a7f863e79 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -418,9 +418,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + 
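        # "users" and "sessions" are the COUNT(DISTINCT ...) aliases computed by the main query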
schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') diff --git a/api/schemas.py b/api/schemas.py index 1d0e18dfb..3b4fefbd6 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -600,7 +600,7 @@ class SessionsSearchPayloadSchema(BaseModel): startDate: int = Field(None) endDate: int = Field(None) sort: str = Field(default="startTs") - order: str = Field(default="DESC") + order: Literal["asc", "desc"] = Field(default="desc") events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) group_by_user: bool = Field(default=False) limit: int = Field(default=200, gt=0, le=200) @@ -697,7 +697,14 @@ class ErrorStatus(str, Enum): ignored = 'ignored' +class ErrorSort(str, Enum): + occurrence = 'occurrence' + users_count = 'users' + sessions_count = 'sessions' + + class SearchErrorsSchema(SessionsSearchPayloadSchema): + sort: ErrorSort = Field(default=ErrorSort.occurrence) density: Optional[int] = Field(7) status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) query: Optional[str] = Field(default=None) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 407e94052..8531d89a3 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -438,9 +438,9 @@ def __get_step_size(startTimestamp, endTimestamp, density): def __get_sort_key(key): return { - "datetime": "max_datetime", - "lastOccurrence": "max_datetime", - "firstOccurrence": "min_datetime" + schemas.ErrorSort.occurrence: "max_datetime", + schemas.ErrorSort.users_count: "users", + schemas.ErrorSort.sessions_count: "sessions" }.get(key, 'max_datetime') From 6e20899102347f7c510433092e65ef8f9db79f85 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Mon, 14 Mar 2022 13:28:30 +0100 Subject: [PATCH 41/60] fix(actions): get worker images Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 4588ccb09..a61d75160 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -47,7 +47,7 @@ jobs: # # Getting the images to build # - git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt + git diff --name-only HEAD HEAD~1 | grep backend/services | cut -d '/' -f3 | uniq > backend/images_to_build.txt [[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry From 0c6f3c9a799483e2636857efa1eafd9c8136db29 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 15 Mar 2022 19:41:08 +0100 Subject: [PATCH 42/60] feat(ui) - errors pagination --- frontend/app/duck/errors.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/duck/errors.js b/frontend/app/duck/errors.js index 1f41f823a..ea8179dcd 100644 --- a/frontend/app/duck/errors.js +++ b/frontend/app/duck/errors.js @@ -135,7 +135,7 @@ export const fetchList = (params = {}, clear = false) => (dispatch, getState) => return dispatch({ types: array(FETCH_LIST), - call: client => client.post('/errors/search', { ...params, ...options }), + call: client => client.post('/errors/search', { ...params, ...options.toJS() }), clear, params: cleanParams(params), }); From 16d73b6299fa56964afff787e532f7bff41dd7e3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 19:58:15 +0100 Subject: [PATCH 43/60] feat(api): search-errors ignore sort value --- api/schemas.py | 3 ++- 1 file 
changed, 2 insertions(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 3b4fefbd6..ba0085ef9 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -704,7 +704,8 @@ class ErrorSort(str, Enum): class SearchErrorsSchema(SessionsSearchPayloadSchema): - sort: ErrorSort = Field(default=ErrorSort.occurrence) + # sort: ErrorSort = Field(default=ErrorSort.occurrence) + sort: str = Field(default=ErrorSort.occurrence.value) density: Optional[int] = Field(7) status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) query: Optional[str] = Field(default=None) From f17a0b324e85a84b3bb633a231b8d4661253bbf8 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 15 Mar 2022 20:02:22 +0100 Subject: [PATCH 44/60] fix(ui) - metadata --- frontend/app/components/Errors/Errors.js | 4 +++- frontend/app/duck/errors.js | 7 ++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/frontend/app/components/Errors/Errors.js b/frontend/app/components/Errors/Errors.js index c29a5f200..10812558d 100644 --- a/frontend/app/components/Errors/Errors.js +++ b/frontend/app/components/Errors/Errors.js @@ -8,6 +8,7 @@ import { fetchList as fetchSlackList } from 'Duck/integrations/slack'; import { errors as errorsRoute, isRoute } from "App/routes"; import DateRange from 'Components/BugFinder/DateRange'; import withPageTitle from 'HOCs/withPageTitle'; +import cn from 'classnames'; import List from './List/List'; import ErrorInfo from './Error/ErrorInfo'; @@ -78,11 +79,12 @@ export default class Errors extends React.PureComponent { }, status, list, + history, } = this.props; return (
-
+
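The fetchList thunk below now serializes the Immutable options map once and rewrites the UI-only bookmark pseudo-status before calling POST /errors/search. A small sketch of that translation; the concrete value of the BOOKMARK constant is assumed here, not taken from the source:

    // illustrative only -- BOOKMARK's actual value lives in the duck's constants
    const BOOKMARK = 'bookmark';

    function toSearchOptions(options) {
      const out = { ...options };      // options are already plain objects, via .toJS()
      if (out.status === BOOKMARK) {
        out.bookmarked = true;         // server-side flag understood by /errors/search
        out.status = 'all';            // fall back to a real ErrorStatus value
      }
      return out;
    }

    // toSearchOptions({ status: 'bookmark' }) -> { status: 'all', bookmarked: true }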
(dispatch, getState) => params.page = getState().getIn(['errors', 'currentPage']); params.limit = PER_PAGE; - const options = getState().getIn(['errors', 'options']); - if (options.get("status") === BOOKMARK) { + const options = getState().getIn(['errors', 'options']).toJS(); + if (options.status === BOOKMARK) { options.bookmarked = true; + options.status = 'all'; } return dispatch({ types: array(FETCH_LIST), - call: client => client.post('/errors/search', { ...params, ...options.toJS() }), + call: client => client.post('/errors/search', { ...params, ...options }), clear, params: cleanParams(params), }); From 9a218159901c64193b1b2970f6f3fbc7b45da500 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 15 Mar 2022 20:25:01 +0100 Subject: [PATCH 45/60] feat(api): search-errors sort value --- api/schemas.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/api/schemas.py b/api/schemas.py index ba0085ef9..3b4fefbd6 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -704,8 +704,7 @@ class ErrorSort(str, Enum): class SearchErrorsSchema(SessionsSearchPayloadSchema): - # sort: ErrorSort = Field(default=ErrorSort.occurrence) - sort: str = Field(default=ErrorSort.occurrence.value) + sort: ErrorSort = Field(default=ErrorSort.occurrence) density: Optional[int] = Field(7) status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) query: Optional[str] = Field(default=None) From e043c400d5a5b0e5ea9489b0aaaa9586905f522f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 13:36:22 +0100 Subject: [PATCH 46/60] feat(utilities): FOSS-WS get live sessions by userId --- utilities/servers/websocket.js | 74 +++++++++++++++++++++++++++++----- 1 file changed, 64 insertions(+), 10 deletions(-) diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index b772228be..c902f1b9d 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -17,14 +17,29 @@ const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; let io; let debug = process.env.debug === "1" || false; -const socketsList = function (req, res) { +const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); + let userId; + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + userId = req.query.userId; + } let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(sessionId); + } } } res.statusCode = 200; @@ -33,14 +48,29 @@ const socketsList = function (req, res) { } wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); -const socketsListByProject = function (req, res) { +const socketsListByProject = async function (req, res) { debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); + let userId; + if (req.query.userId) { + debug && 
console.log(`[WS]where userId=${req.query.userId}`); + userId = req.query.userId; + } let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === req.params.projectKey) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(sessionId); + } } } res.statusCode = 200; @@ -51,6 +81,11 @@ wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByPro const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId; + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + userId = req.query.userId; + } let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); @@ -58,8 +93,15 @@ const socketsLive = async function (req, res) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } @@ -73,6 +115,11 @@ wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + let userId; + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + userId = req.query.userId; + } let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); @@ -80,8 +127,15 @@ const socketsLiveByProject = async function (req, res) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey] = liveSessions[projectKey] || []; + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } From d96dc8b6ade8ee6dc367c318e94993cf5c2d7582 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 13:47:42 +0100 Subject: [PATCH 47/60] 
feat(utilities): FOSS-WS refactored get live sessions by userId --- utilities/servers/websocket.js | 46 +++++++++++++--------------------- 1 file changed, 17 insertions(+), 29 deletions(-) diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index c902f1b9d..e23002246 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -12,32 +12,35 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; let io; let debug = process.env.debug === "1" || false; +const extractUserIdFromRequest = function (req) { + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; + } + return undefined; +} + const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); - let userId; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - userId = req.query.userId; - } + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { + liveSessions[projectKey] = liveSessions[projectKey] || []; if (userId) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(sessionId); } } } else { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(sessionId); } } @@ -50,25 +53,20 @@ wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); - let userId; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - userId = req.query.userId; - } + let userId = extractUserIdFromRequest(req); let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey === req.params.projectKey) { + liveSessions[projectKey] = liveSessions[projectKey] || []; if (userId) { const connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(sessionId); } } } else { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(sessionId); } } @@ -81,11 +79,7 @@ wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByPro const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); - let userId; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - userId = req.query.userId; - } + let userId = extractUserIdFromRequest(req); let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let 
{projectKey, sessionId} = extractPeerId(peerId); @@ -93,13 +87,12 @@ const socketsLive = async function (req, res) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { + liveSessions[projectKey] = liveSessions[projectKey] || []; if (userId) { if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } @@ -115,11 +108,7 @@ wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); - let userId; - if (req.query.userId) { - debug && console.log(`[WS]where userId=${req.query.userId}`); - userId = req.query.userId; - } + let userId = extractUserIdFromRequest(req); let liveSessions = {}; for (let peerId of io.sockets.adapter.rooms.keys()) { let {projectKey, sessionId} = extractPeerId(peerId); @@ -127,13 +116,12 @@ const socketsLiveByProject = async function (req, res) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { + liveSessions[projectKey] = liveSessions[projectKey] || []; if (userId) { if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } else { - liveSessions[projectKey] = liveSessions[projectKey] || []; liveSessions[projectKey].push(item.handshake.query.sessionInfo); } } From 2748d29b3f6527ce76dc8569cf872404d0de8f81 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 16 Mar 2022 13:55:50 +0100 Subject: [PATCH 48/60] change(ui) - show last played session --- .../BugFinder/SessionList/SessionList.js | 15 +++---------- .../app/components/Session_/Player/Player.js | 4 ++++ .../shared/SessionItem/SessionItem.js | 22 +++++++++++++------ frontend/app/duck/sessions.js | 21 +++++++++++++++++- 4 files changed, 42 insertions(+), 20 deletions(-) diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index 858e9cb30..f5152222a 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -21,6 +21,7 @@ var timeoutId; filters: state.getIn([ 'search', 'instance', 'filters' ]), metaList: state.getIn(['customFields', 'list']).map(i => i.key), currentPage: state.getIn([ 'search', 'currentPage' ]), + lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]), }), { applyFilter, addAttribute, @@ -90,6 +91,7 @@ export default class SessionList extends React.PureComponent { metaList, currentPage, total, + lastPlayedSessionId, } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); @@ -127,6 +129,7 @@ export default class SessionList extends React.PureComponent { hasUserFilter={hasUserFilter} onUserClick={this.onUserClick} metaList={metaList} + lastPlayedSessionId={lastPlayedSessionId} /> 
))} @@ -139,18 +142,6 @@ export default class SessionList extends React.PureComponent { debounceRequest={1000} />
- {/* - Haven't found the session in the above list?
Try being a bit more specific by setting a specific time frame or simply use different filters -
- } - /> */} ); } diff --git a/frontend/app/components/Session_/Player/Player.js b/frontend/app/components/Session_/Player/Player.js index 0f0b51786..7391c8992 100644 --- a/frontend/app/components/Session_/Player/Player.js +++ b/frontend/app/components/Session_/Player/Player.js @@ -9,6 +9,7 @@ import Controls from './Controls'; import Overlay from './Overlay'; import stl from './player.css'; import EventsToggleButton from '../../Session/EventsToggleButton'; +import { updateLastPlayedSession } from 'Duck/sessions'; @connectPlayer(state => ({ live: state.live, @@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton'; return { fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), nextId: state.getIn([ 'sessions', 'nextId' ]), + sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]), closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])), } }, { hideTargetDefiner, fullscreenOff, + updateLastPlayedSession, }) export default class Player extends React.PureComponent { screenWrapper = React.createRef(); componentDidMount() { + this.props.updateLastPlayedSession(this.props.sessionId); if (this.props.closedLive) return; const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture diff --git a/frontend/app/components/shared/SessionItem/SessionItem.js b/frontend/app/components/shared/SessionItem/SessionItem.js index 0b7551760..64e4199ba 100644 --- a/frontend/app/components/shared/SessionItem/SessionItem.js +++ b/frontend/app/components/shared/SessionItem/SessionItem.js @@ -3,29 +3,25 @@ import cn from 'classnames'; import { Link, Icon, - OsIcon, - BrowserIcon, CountryFlag, Avatar, TextEllipsis, Label, } from 'UI'; -import { deviceTypeIcon } from 'App/iconNames'; import { toggleFavorite, setSessionPath } from 'Duck/sessions'; import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes'; import { durationFormatted, formatTimeOrDate } from 'App/date'; import stl from './sessionItem.css'; -import LiveTag from 'Shared/LiveTag'; -import Bookmark from 'Shared/Bookmark'; import Counter from './Counter' import { withRouter } from 'react-router-dom'; import SessionMetaList from './SessionMetaList'; import ErrorBars from './ErrorBars'; -import { assist as assistRoute, liveSession, isRoute } from "App/routes"; +import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes"; import { capitalize } from 'App/utils'; const ASSIST_ROUTE = assistRoute(); const ASSIST_LIVE_SESSION = liveSession() +const SESSIONS_ROUTE = sessionsRoute(); // const Label = ({ label = '', color = 'color-gray-medium'}) => ( //
{label}
@@ -69,10 +65,13 @@ export default class SessionItem extends React.PureComponent { disableUser = false, metaList = [], showActive = false, + lastPlayedSessionId, } = this.props; const formattedDuration = durationFormatted(duration); const hasUserId = userId || userAnonymousId; + const isSessions = isRoute(SESSIONS_ROUTE, this.props.location.pathname); const isAssist = isRoute(ASSIST_ROUTE, this.props.location.pathname) || isRoute(ASSIST_LIVE_SESSION, this.props.location.pathname); + const isLastPlayed = lastPlayedSessionId === sessionId; const _metaList = Object.keys(metadata).filter(i => metaList.includes(i)).map(key => { const value = metadata[key]; @@ -125,7 +124,7 @@ export default class SessionItem extends React.PureComponent {
- { !isAssist && ( + { isSessions && (
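The hunk below gates a new last-played marker on two flags. A condensed sketch of that check, with a plain path comparison standing in for the route test:

    // sketch: when the "last played" badge shows, per the flags in this diff
    function showLastPlayedBadge(pathname, sessionId, lastPlayedSessionId) {
      const isSessions = pathname === '/sessions'; // stand-in for isRoute(SESSIONS_ROUTE, pathname)
      return isSessions && lastPlayedSessionId === sessionId;
    }

    // showLastPlayedBadge('/sessions', 's1', 's1') === true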
@@ -139,6 +138,15 @@ export default class SessionItem extends React.PureComponent { )}
+ { isSessions && ( +
+ { isLastPlayed && ( + + )} +
+ )} diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js index f3df333c7..34109ada3 100644 --- a/frontend/app/duck/sessions.js +++ b/frontend/app/duck/sessions.js @@ -7,9 +7,9 @@ import withRequestState, { RequestTypes } from './requestStateCreator'; import { getRE } from 'App/utils'; import { LAST_7_DAYS } from 'Types/app/period'; import { getDateRangeFromValue } from 'App/dateRange'; +const name = 'sessions'; const INIT = 'sessions/INIT'; - const FETCH_LIST = new RequestTypes('sessions/FETCH_LIST'); const FETCH = new RequestTypes('sessions/FETCH'); const FETCH_FAVORITE_LIST = new RequestTypes('sessions/FETCH_FAVORITE_LIST'); @@ -26,6 +26,7 @@ const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW'; const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG'; const SET_TIMELINE_POINTER = 'sessions/SET_TIMELINE_POINTER'; const SET_SESSION_PATH = 'sessions/SET_SESSION_PATH'; +const LAST_PLAYED_SESSION_ID = `${name}/LAST_PLAYED_SESSION_ID`; const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB'; @@ -60,6 +61,7 @@ const initialState = Map({ funnelPage: Map(), timelinePointer: null, sessionPath: '', + lastPlayedSessionId: null, }); const reducer = (state = initialState, action = {}) => { @@ -248,11 +250,21 @@ const reducer = (state = initialState, action = {}) => { return state.set('timelinePointer', action.pointer); case SET_SESSION_PATH: return state.set('sessionPath', action.path); + case LAST_PLAYED_SESSION_ID: + return updateListItem(state, action.sessionId, { viewed: true }).set('lastPlayedSessionId', action.sessionId); default: return state; } }; +function updateListItem(state, sourceSessionId, instance) { + const list = state.get('list'); + const index = list.findIndex(({ sessionId }) => sessionId === sourceSessionId); + if (index === -1) return state; + + return state.updateIn([ 'list', index ], session => session.merge(instance)); +} + export default withRequestState({ _: [ FETCH, FETCH_LIST ], fetchLiveListRequest: FETCH_LIVE_LIST, @@ -390,4 +402,11 @@ export function setSessionPath(path) { type: SET_SESSION_PATH, path } +} + +export function updateLastPlayedSession(sessionId) { + return { + type: LAST_PLAYED_SESSION_ID, + sessionId, + }; } \ No newline at end of file From ca8e96354662c227d89200c337e10b83b076f429 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 15:16:42 +0100 Subject: [PATCH 49/60] feat(utilities): FOSS&EE assist-standalone refactor feat(utilities): FOSS&EE assist-standalone search by userId --- ee/utilities/server.js | 2 +- ee/utilities/servers/websocket.js | 199 ++++++++++++++++++------------ scripts/helmcharts/vars.yaml | 2 +- utilities/servers/websocket.js | 85 ++++++++----- 4 files changed, 176 insertions(+), 112 deletions(-) diff --git a/ee/utilities/server.js b/ee/utilities/server.js index f1209c9ff..d049faa19 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers var express = require('express'); const {ExpressPeerServer} = require('peer'); var socket; -if (process.env.cluster === "true") { +if (process.env.redis === "true") { console.log("Using Redis"); socket = require("./servers/websocket-cluster"); } else { diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index e087dba31..4aa048b1f 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -2,8 +2,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = 
require('ua-parser-js'); const geoip2Reader = require('@maxmind/geoip2-node').Reader; -var {extractPeerId} = require('./peerjs-server'); -var wsRouter = express.Router(); +const {extractPeerId} = require('./peerjs-server'); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -12,22 +12,61 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; let io; -let debug = process.env.debug === "1" || false; +const debug = process.env.debug === "1" || false; -const socketsList = function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); - } +const createSocketIOServer = function (server) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: '/socket', + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); } - let result = {"data": liveSessions}; +} + +const extractUserIdFromRequest = function (req) { + if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; + } + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + return io.sockets.adapter.rooms.keys(); +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); @@ -36,84 +75,111 @@ const socketsList = function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); -const socketsListByProject = function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey !== undefined) { liveSessions[projectKey] = 
liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); + respond(res, liveSessions); +} +wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); + +const socketsListByProject = async function (req, res) { + debug && console.log("[WS]looking for available sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey === _projectKey) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE 
sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -193,28 +259,7 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } + createSocketIOServer(server); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index 25d6a67d6..f5cc11067 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -100,7 +100,7 @@ utilities: env: debug: 0 uws: false - cluster: false + redis: false # If you want to override something # chartname: diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index e23002246..66d0fcde0 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -2,8 +2,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); const geoip2Reader = require('@maxmind/geoip2-node').Reader; -var {extractPeerId} = require('./peerjs-server'); -var wsRouter = express.Router(); +const {extractPeerId} = require('./peerjs-server'); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -14,7 +14,19 @@ const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; let io; -let debug = process.env.debug === "1" || false; +const debug = process.env.debug === "1" || false; + +const createSocketIOServer = function (server) { + io = _io(server, { + 
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: '/socket' + }); +} + const extractUserIdFromRequest = function (req) { if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); @@ -23,13 +35,32 @@ const extractUserIdFromRequest = function (req) { return undefined; } +const extractProjectKeyFromRequest = function (req) { + if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + return io.sockets.adapter.rooms.keys(); +} + +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { liveSessions[projectKey] = liveSessions[projectKey] || []; @@ -45,19 +76,19 @@ const socketsList = async function (req, res) { } } } - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify({"data": liveSessions})); + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available sessions"); + let _projectKey = extractProjectKeyFromRequest(req); let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; if (userId) { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -71,9 +102,7 @@ const socketsListByProject = async function (req, res) { } } } - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []})); + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); @@ -81,7 +110,8 @@ const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); @@ -99,20 +129,19 @@ const socketsLive = async function (req, res) { } } } - - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify({"data": liveSessions})); + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, 
res) { - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); let userId = extractUserIdFromRequest(req); let liveSessions = {}; - for (let peerId of io.sockets.adapter.rooms.keys()) { + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { @@ -128,9 +157,7 @@ const socketsLiveByProject = async function (req, res) { } } } - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []})); + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -210,15 +237,7 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, start: (server) => { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - + createSocketIOServer(server); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; From 55bf3c703660044e35dcecc0f8245ee4743d8f7d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 15:24:16 +0100 Subject: [PATCH 50/60] feat(utilities): EE assist-standalone fixed extract userId --- ee/utilities/servers/websocket.js | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 4aa048b1f..7e5f062fa 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -42,7 +42,12 @@ const createSocketIOServer = function (server) { } const extractUserIdFromRequest = function (req) { - if (req.query.userId) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); + } + } else if (req.query.userId) { debug && console.log(`[WS]where userId=${req.query.userId}`); return req.query.userId; } @@ -51,8 +56,10 @@ const extractUserIdFromRequest = function (req) { const extractProjectKeyFromRequest = function (req) { if (process.env.uws === "true") { - debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); - return req.getParameter(0); + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } } else if (req.params.projectKey) { debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); return req.params.projectKey; From 83325af69fcaefd59e8b9a4d928f559c1457c659 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 15:49:54 +0100 Subject: [PATCH 51/60] feat(utilities): FOSS&EE assist-standalone refactored feat(utilities): EE assist-redis refactored feat(utilities): EE assist-redis search by userId --- ee/utilities/servers/websocket-cluster.js | 210 +++++++++++++--------- ee/utilities/servers/websocket.js | 6 +- utilities/servers/websocket.js | 6 +- 
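Patch 50 extends the userId extractor with the same uws switch that patch 51 then carries into the cluster server: under uWebSockets.js a request exposes its parameters through req.getQuery()/req.getParameter() rather than Express's req.query/req.params. A minimal sketch of that dual-mode lookup, assuming those are the only two request shapes in play (the helper names below are illustrative, not part of the series):

    // Illustrative helpers only — the series inlines this logic in
    // extractUserIdFromRequest / extractProjectKeyFromRequest.
    const isUws = process.env.uws === "true";

    function queryParam(req: any, name: string): string | undefined {
        // uWS request: req.getQuery(name); Express request: req.query[name]
        return isUws ? req.getQuery(name) : req.query[name];
    }

    function pathParam(req: any, index: number, name: string): string | undefined {
        // uWS addresses route parameters by position, Express by name
        return isUws ? req.getParameter(index) : req.params[name];
    }

    // Equivalent to the patched extractors:
    //   const userId = queryParam(req, "userId");
    //   const projectKey = pathParam(req, 0, "projectKey");

Centralizing the branch this way keeps the route handlers identical across both HTTP stacks, which is what allows the patched handlers below to call the extractors without knowing which server started.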
3 files changed, 134 insertions(+), 88 deletions(-) diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 940f83879..f1cba3014 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader; const {extractPeerId} = require('./peerjs-server'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); - -var wsRouter = express.Router(); +const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; const NEW_AGENT = "NEW_AGENT"; @@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; -// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000; +const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; +const pubClient = createClient({url: REDIS_URL}); +const subClient = pubClient.duplicate(); let io; const debug = process.env.debug === "1" || false; -const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; -const pubClient = createClient({url: REDIS_URL}); -const subClient = pubClient.duplicate(); +const createSocketIOServer = function (server) { + if (process.env.uws !== "true") { + io = _io(server, { + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: '/socket' + }); + } else { + io = new _io.Server({ + maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + cors: { + origin: "*", + methods: ["GET", "POST", "PUT"] + }, + path: '/socket', + // transports: ['websocket'], + // upgrade: false + }); + io.attachApp(server); + } +} const uniqueSessions = function (data) { let resArr = []; @@ -36,18 +58,40 @@ const uniqueSessions = function (data) { return resArr; } -const socketsList = async function (req, res) { - debug && console.log("[WS]looking for all available sessions"); - let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); - for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey !== undefined) { - liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); +const extractUserIdFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getQuery("userId")) { + debug && console.log(`[WS]where userId=${req.getQuery("userId")}`); + return req.getQuery("userId"); } + } else if (req.query.userId) { + debug && console.log(`[WS]where userId=${req.query.userId}`); + return req.query.userId; } - let result = {"data": liveSessions}; + return undefined; +} + +const extractProjectKeyFromRequest = function (req) { + if (process.env.uws === "true") { + if (req.getParameter(0)) { + debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`); + return req.getParameter(0); + } + } else if (req.params.projectKey) { + debug && console.log(`[WS]where projectKey=${req.params.projectKey}`); + return req.params.projectKey; + } + return undefined; +} + + +const getAvailableRooms = async function () { + let rooms = await io.of('/').adapter.allRooms(); + return rooms; +} + +const respond = function (res, data) { + let result = {data} if (process.env.uws !== "true") { res.statusCode = 200; 
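// Express/plain-Node branch of the shared respond() helper: set the status
// code and Content-Type header, then end() with the serialized {"data": ...}
// envelope. The else branch below covers uWebSockets.js, whose HttpResponse
// is driven by chaining writeStatus('200 OK').writeHeader(...) rather than
// by assigning response fields; both paths emit the same JSON body.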
res.setHeader('Content-Type', 'application/json'); @@ -56,37 +100,64 @@ const socketsList = async function (req, res) { res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); } } + +const socketsList = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + let userId = extractUserIdFromRequest(req); + + let liveSessions = {}; + let rooms = await getAvailableRooms(); + for (let peerId of rooms) { + let {projectKey, sessionId} = extractPeerId(peerId); + if (projectKey !== undefined) { + liveSessions[projectKey] = liveSessions[projectKey] || []; + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } + } + } + respond(res, liveSessions); +} wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(sessionId); + if (userId) { + const connected_sockets = await io.in(peerId).fetchSockets(); + for (let item of connected_sockets) { + if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(sessionId); + } + } + } else { + liveSessions[projectKey].push(sessionId); + } } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); if (projectKey !== undefined) { @@ -94,51 +165,48 @@ const socketsLive = async function (req, res) { for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID 
=== userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); } } - let result = {"data": liveSessions}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { - if (process.env.uws === "true") { - req.params = {projectKey: req.getParameter(0)}; - } - debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); + debug && console.log("[WS]looking for available LIVE sessions"); + let _projectKey = extractProjectKeyFromRequest(req); + let userId = extractUserIdFromRequest(req); let liveSessions = {}; - let rooms = await io.of('/').adapter.allRooms(); + let rooms = await getAvailableRooms(); for (let peerId of rooms) { let {projectKey, sessionId} = extractPeerId(peerId); - if (projectKey === req.params.projectKey) { + if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { if (item.handshake.query.identity === IDENTITIES.session) { liveSessions[projectKey] = liveSessions[projectKey] || []; - liveSessions[projectKey].push(item.handshake.query.sessionInfo); + if (userId) { + if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } + } else { + liveSessions[projectKey].push(item.handshake.query.sessionInfo); + } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); } } - let result = {"data": liveSessions[req.params.projectKey] || []}; - if (process.env.uws !== "true") { - res.statusCode = 200; - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(result)); - } else { - res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result)); - } + respond(res, liveSessions[_projectKey] || []); } wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); @@ -220,34 +288,12 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, start: (server) => { - if (process.env.uws !== "true") { - io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket' - }); - } else { - io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - cors: { - origin: "*", - methods: ["GET", "POST", "PUT"] - }, - path: '/socket', - // transports: ['websocket'], - // upgrade: false - }); - io.attachApp(server); - } - + createSocketIOServer(server); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; socket.identity = socket.handshake.query.identity; - let {projectKey, sessionId} = extractPeerId(socket.peerId); + const {projectKey, 
sessionId} = extractPeerId(socket.peerId); socket.sessionId = sessionId; socket.projectKey = projectKey; socket.lastMessageReceivedAt = Date.now(); diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 7e5f062fa..63559e11b 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -337,10 +337,10 @@ module.exports = { socket.onAny(async (eventName, ...args) => { socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); } else { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`); let socketId = await findSessionSocketId(io, socket.peerId); if (socketId === null) { debug && console.log(`session not found for:${socket.peerId}`); @@ -354,7 +354,7 @@ module.exports = { }); console.log("WS server started") - setInterval((io) => { + setInterval(async (io) => { try { let count = 0; console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `); diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index 66d0fcde0..dd4c012dc 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -308,10 +308,10 @@ module.exports = { socket.onAny(async (eventName, ...args) => { socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); } else { - debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`); + debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`); let socketId = await findSessionSocketId(io, socket.peerId); if (socketId === null) { debug && console.log(`session not found for:${socket.peerId}`); @@ -325,7 +325,7 @@ module.exports = { }); console.log("WS server started") - setInterval((io) => { + setInterval(async (io) => { try { let count = 0; console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `); From 729d5715b45beaab3adae6647fb7cf87d0787e64 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 16:02:47 +0100 Subject: [PATCH 52/60] feat(utilities): FOSS&EE assist socket creation customize path --- ee/utilities/servers/websocket-cluster.js | 10 +++++----- ee/utilities/servers/websocket.js | 10 +++++----- utilities/servers/websocket.js | 8 ++++---- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index 
f1cba3014..c044043a5 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -21,7 +21,7 @@ const subClient = pubClient.duplicate(); let io; const debug = process.env.debug === "1" || false; -const createSocketIOServer = function (server) { +const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, @@ -29,7 +29,7 @@ const createSocketIOServer = function (server) { origin: "*", methods: ["GET", "POST", "PUT"] }, - path: '/socket' + path: (prefix ? prefix : '') + '/socket' }); } else { io = new _io.Server({ @@ -38,7 +38,7 @@ const createSocketIOServer = function (server) { origin: "*", methods: ["GET", "POST", "PUT"] }, - path: '/socket', + path: (prefix ? prefix : '') + '/socket' // transports: ['websocket'], // upgrade: false }); @@ -287,8 +287,8 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - createSocketIOServer(server); + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 63559e11b..0bd397d96 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -16,7 +16,7 @@ const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; let io; const debug = process.env.debug === "1" || false; -const createSocketIOServer = function (server) { +const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, @@ -24,7 +24,7 @@ const createSocketIOServer = function (server) { origin: "*", methods: ["GET", "POST", "PUT"] }, - path: '/socket' + path: (prefix ? prefix : '') + '/socket' }); } else { io = new _io.Server({ @@ -33,7 +33,7 @@ const createSocketIOServer = function (server) { origin: "*", methods: ["GET", "POST", "PUT"] }, - path: '/socket', + path: (prefix ? prefix : '') + '/socket' // transports: ['websocket'], // upgrade: false }); @@ -265,8 +265,8 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - createSocketIOServer(server); + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; diff --git a/utilities/servers/websocket.js b/utilities/servers/websocket.js index dd4c012dc..772bd7315 100644 --- a/utilities/servers/websocket.js +++ b/utilities/servers/websocket.js @@ -16,14 +16,14 @@ const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; let io; const debug = process.env.debug === "1" || false; -const createSocketIOServer = function (server) { +const createSocketIOServer = function (server, prefix) { io = _io(server, { maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] }, - path: '/socket' + path: (prefix ? 
prefix : '') + '/socket' }); } @@ -236,8 +236,8 @@ function extractSessionInfo(socket) { module.exports = { wsRouter, - start: (server) => { - createSocketIOServer(server); + start: (server, prefix) => { + createSocketIOServer(server, prefix); io.on('connection', async (socket) => { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; From c45d87d12ec10405bedbd6cc742c5a58a8b72388 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 16:25:38 +0100 Subject: [PATCH 53/60] feat(api): assist search by userId --- api/chalicelib/core/assist.py | 5 +++-- api/routers/core.py | 11 ++--------- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 70f563ec8..44adfe2d1 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -64,9 +64,10 @@ def get_live_sessions(project_id, filters=None): return helper.list_to_camel_case(results) -def get_live_sessions_ws(project_id): +def get_live_sessions_ws(project_id, user_id=None): project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") + connected_peers = requests.get(config("peers") % config("S3_KEY") \ + + f"/{project_key}" + (f"?userId={user_id}" if user_id else "")) if connected_peers.status_code != 200: print("!! issue with the peer-server") print(connected_peers.text) diff --git a/api/routers/core.py b/api/routers/core.py index df9ce0e8f..97a749429 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -820,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) - return {'data': data} - - -@app.post('/{projectId}/assist/sessions', tags=["assist"]) -def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = assist.get_live_sessions_ws(projectId) +def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions_ws(projectId, user_id=userId) return {'data': data} From f09c29c7b468ed4f07bd2d7f83106841e40e6255 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Mar 2022 16:41:38 +0100 Subject: [PATCH 54/60] feat(api): changed assist search by userId --- api/chalicelib/core/assist.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 44adfe2d1..e4b1d4a7e 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -66,8 +66,10 @@ def get_live_sessions(project_id, filters=None): def get_live_sessions_ws(project_id, user_id=None): project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peers") % config("S3_KEY") \ - + f"/{project_key}" + (f"?userId={user_id}" if user_id else "")) + params = {} + if user_id and len(user_id) > 0: + params["userId"] = user_id + connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) From a71905b4a3c20f7a36148abb09da4b0ea465f12c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 17 Mar 2022 19:36:46 +0100 Subject: [PATCH 55/60] feat(api): refactored sessions-search-builder feat(api): changed global sessions-search feat(api): group by userID sessions-search sort support --- api/chalicelib/core/alerts_processor.py | 2 +- api/chalicelib/core/sessions.py | 64 +++++++++++++++---------- 2 files changed, 39 insertions(+), 27 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index e4579826f..56fde11da 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -102,7 +102,7 @@ def Build(a): a["filter"]["order"] = "DESC" a["filter"]["startDate"] = -1 a["filter"]["endDate"] = TimeUTC.now() - full_args, query_part, sort = sessions.search_query_parts( + full_args, query_part= sessions.search_query_parts( data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False, issue=None, project_id=a["projectId"], user_id=None, favorite_only=False) subQ = f"""SELECT COUNT(session_id) AS value diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index dee3c33cf..8a191c981 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -170,9 +170,9 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): @dev.timed def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None): - full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, - favorite_only=data.bookmarked, issue=issue, project_id=project_id, - user_id=user_id) + full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, + favorite_only=data.bookmarked, issue=issue, project_id=project_id, + user_id=user_id) if data.limit is not None and data.page is not None: full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit @@ -199,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e COUNT(DISTINCT s.user_uuid) AS count_users {query_part};""", full_args) elif data.group_by_user: + g_sort = "count(full_sessions)" + if data.order is None: + data.order = "DESC" + else: + data.order = data.order.upper() + if data.sort is not None and data.sort != 'sessionsCount': + sort = helper.key_to_snake_case(data.sort) + g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})" + else: + sort = 'start_ts' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) @@ -207,29 +218,36 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite 
DESC, issue_score DESC, {sort} {data.order}) AS full_sessions - GROUP BY user_id - ORDER BY user_sessions_count DESC) AS users_sessions;""", + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ) AS filtred_sessions + ) AS full_sessions + GROUP BY user_id + ) AS users_sessions;""", full_args) else: + if data.order is None: + data.order = "DESC" + sort = 'session_id' + if data.sort is not None and data.sort != "session_id": + sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) + else: + sort = 'session_id' + meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""", + ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""", full_args) - # print("--------------------") # print(main_query) # print("--------------------") @@ -275,9 +293,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0: data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, operator=schemas.SearchEventOperator._is)) - full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) + full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, + favorite_only=False, issue=None, project_id=project_id, + user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] with pg_client.PostgresClient() as cur: @@ -951,13 +969,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr # elif data.platform == schemas.PlatformType.desktop: # extra_constraints.append( # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - if data.order is None: - data.order = "DESC" - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - else: - sort = 'session_id' + if errors_only: extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" extra_constraints.append("ser.source = 'js_exception'") @@ -1002,7 +1014,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr {extra_from} WHERE {" AND ".join(extra_constraints)}""" - return full_args, query_part, sort + return full_args, query_part def search_by_metadata(tenant_id, user_id, m_key, 
m_value, project_id=None): From 18a1071060a921b9cbf2f9e110cb3b202412419b Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 18 Mar 2022 17:27:49 +0100 Subject: [PATCH 56/60] fix(backend): trim rawURL string before resolving --- backend/pkg/url/assets/url.go | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go index b087878b9..1fe717531 100644 --- a/backend/pkg/url/assets/url.go +++ b/backend/pkg/url/assets/url.go @@ -9,16 +9,17 @@ import ( func getSessionKey(sessionID uint64) string { // Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID - return strconv.FormatUint(sessionID>>50, 10) + return strconv.FormatUint(sessionID>>50, 10) } func ResolveURL(baseurl string, rawurl string) string { + rawurl = strings.Trim(rawurl, " ") if !isRelativeCachable(rawurl) { return rawurl } base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls - u, _ := url.Parse(rawurl) // TODO: handle errors ? - if base == nil || u == nil { + u, _ := url.Parse(rawurl) // TODO: handle errors ? + if base == nil || u == nil { return rawurl } return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl) @@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string { } func GetCachePathForAssets(sessionID uint64, rawurl string) string { - return getCachePathWithKey(sessionID, rawurl) + return getCachePathWithKey(sessionID, rawurl) } - func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string { fullURL, cachable := GetFullCachableURL(baseURL, relativeURL) if !cachable { return fullURL } - u := url.URL{ - Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), - Host: r.assetsURL.Host, - Scheme: r.assetsURL.Scheme, + u := url.URL{ + Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL), + Host: r.assetsURL.Host, + Scheme: r.assetsURL.Scheme, } return u.String() } - From be58d4e75479d21465cd16603eecb30ba583f737 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 18 Mar 2022 20:04:31 +0100 Subject: [PATCH 57/60] fix (tracker): 3.5.4: shadowDOM fix - extended 'inDocument' check --- tracker/tracker/package.json | 2 +- tracker/tracker/src/main/app/context.ts | 53 ++++++++++++++----- .../tracker/src/main/app/observer/observer.ts | 9 ++-- .../src/main/app/observer/top_observer.ts | 28 +++++----- 4 files changed, 58 insertions(+), 34 deletions(-) diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index e485faf11..6df50033b 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.5.3", + "version": "3.5.4", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/context.ts b/tracker/tracker/src/main/app/context.ts index aa9a5dfb3..781f91ea8 100644 --- a/tracker/tracker/src/main/app/context.ts +++ b/tracker/tracker/src/main/app/context.ts @@ -41,32 +41,57 @@ export function isInstance(node: Node, constr: Cons // @ts-ignore (for EI, Safary) doc.parentWindow || doc.defaultView; // TODO: smart global typing for Window object - while(context.parent && context.parent !== context) { + while((context.parent || context.top) && context.parent !== context) { // @ts-ignore if (node instanceof context[constr.name]) { return true } // @ts-ignore - context = context.parent + context = context.parent || context.top } // @ts-ignore return node instanceof 
context[constr.name] } -export function inDocument(node: Node): boolean { +// TODO: ensure 1. it works in every cases (iframes/detached nodes) and 2. the most efficient +export function inDocument(node: Node) { const doc = node.ownerDocument - if (!doc) { return false } - if (doc.contains(node)) { return true } - let context: Window = - // @ts-ignore (for EI, Safary) - doc.parentWindow || - doc.defaultView; - while(context.parent && context.parent !== context) { - if (context.document.contains(node)) { + if (!doc) { return true } // Document + let current: Node | null = node + while(current) { + if (current === doc) { return true + } else if(isInstance(current, ShadowRoot)) { + current = current.host + } else { + current = current.parentNode } - // @ts-ignore - context = context.parent } - return false; + return false } + +// export function inDocument(node: Node): boolean { +// // @ts-ignore compatability +// if (node.getRootNode) { +// let root: Node +// while ((root = node.getRootNode()) !== node) { +// //// +// } +// } + +// const doc = node.ownerDocument +// if (!doc) { return false } +// if (doc.contains(node)) { return true } +// let context: Window = +// // @ts-ignore (for EI, Safary) +// doc.parentWindow || +// doc.defaultView; +// while(context.parent && context.parent !== context) { +// if (context.document.contains(node)) { +// return true +// } +// // @ts-ignore +// context = context.parent +// } +// return false; +// } diff --git a/tracker/tracker/src/main/app/observer/observer.ts b/tracker/tracker/src/main/app/observer/observer.ts index 0f4ff2994..06823e07c 100644 --- a/tracker/tracker/src/main/app/observer/observer.ts +++ b/tracker/tracker/src/main/app/observer/observer.ts @@ -1,4 +1,3 @@ -import { hasOpenreplayAttribute } from "../../utils.js"; import { RemoveNodeAttribute, SetNodeAttribute, @@ -59,9 +58,7 @@ export default abstract class Observer { private readonly indexes: Array = []; private readonly attributesList: Array | undefined> = []; private readonly textSet: Set = new Set(); - private readonly inUpperContext: boolean; - constructor(protected readonly app: App, protected readonly context: Window = window) { - this.inUpperContext = context.parent === context //TODO: get rid of context here + constructor(protected readonly app: App, protected readonly isTopContext = false) { this.observer = new MutationObserver( this.app.safe((mutations) => { for (const mutation of mutations) { @@ -226,7 +223,7 @@ export default abstract class Observer { // Disable parent check for the upper context HTMLHtmlElement, because it is root there... (before) // TODO: get rid of "special" cases (there is an issue with CreateDocument altered behaviour though) // TODO: Clean the logic (though now it workd fine) - if (!isInstance(node, HTMLHtmlElement) || !this.inUpperContext) { + if (!isInstance(node, HTMLHtmlElement) || !this.isTopContext) { if (parent === null) { this.unbindNode(node); return false; @@ -321,6 +318,8 @@ export default abstract class Observer { for (let id = 0; id < this.recents.length; id++) { // TODO: make things/logic nice here. // commit required in any case if recents[id] true or false (in case of unbinding) or undefined (in case of attr change). 
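// (Reading the surrounding logic: true appears to mark a freshly bound node,
// false an unbound/removed one, and undefined an attribute-only change —
// hence the unconditional commitNode(id) below for every owned id.)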
+ // Possible solution: separate new node commit (recents) and new attribute/move node commit + // Otherwise commitNode is called on each node, which might be a lot if (!this.myNodes[id]) { continue } this.commitNode(id); if (this.recents[id] === true && (node = this.app.nodes.getNode(id))) { diff --git a/tracker/tracker/src/main/app/observer/top_observer.ts b/tracker/tracker/src/main/app/observer/top_observer.ts index b35f5d901..14bed9768 100644 --- a/tracker/tracker/src/main/app/observer/top_observer.ts +++ b/tracker/tracker/src/main/app/observer/top_observer.ts @@ -6,7 +6,7 @@ import ShadowRootObserver from "./shadow_root_observer.js"; import { CreateDocument } from "../../../messages/index.js"; import App from "../index.js"; -import { IN_BROWSER } from '../../utils.js' +import { IN_BROWSER, hasOpenreplayAttribute } from '../../utils.js' export interface Options { captureIFrames: boolean @@ -17,15 +17,16 @@ const attachShadowNativeFn = IN_BROWSER ? Element.prototype.attachShadow : ()=>n export default class TopObserver extends Observer { private readonly options: Options; constructor(app: App, options: Partial) { - super(app); + super(app, true); this.options = Object.assign({ - captureIFrames: false + captureIFrames: true }, options); // IFrames this.app.nodes.attachNodeCallback(node => { if (isInstance(node, HTMLIFrameElement) && - (this.options.captureIFrames || node.getAttribute("data-openreplay-capture")) + ((this.options.captureIFrames && !hasOpenreplayAttribute(node, "obscured")) + || hasOpenreplayAttribute(node, "capture")) ) { this.handleIframe(node) } @@ -42,26 +43,25 @@ export default class TopObserver extends Observer { private iframeObservers: IFrameObserver[] = []; private handleIframe(iframe: HTMLIFrameElement): void { - let context: Window | null = null + let doc: Document | null = null const handle = this.app.safe(() => { const id = this.app.nodes.getID(iframe) if (id === undefined) { return } //log - if (iframe.contentWindow === context) { return } //Does this happen frequently? - context = iframe.contentWindow as Window | null; - if (!context) { return } - const observer = new IFrameObserver(this.app, context) + if (iframe.contentDocument === doc) { return } // How frequently can it happen? + doc = iframe.contentDocument + if (!doc || !iframe.contentWindow) { return } + const observer = new IFrameObserver(this.app) this.iframeObservers.push(observer) observer.observe(iframe) }) - this.app.attachEventListener(iframe, "load", handle) + iframe.addEventListener("load", handle) // why app.attachEventListener not working? handle() } private shadowRootObservers: ShadowRootObserver[] = [] private handleShadowRoot(shRoot: ShadowRoot) { - const observer = new ShadowRootObserver(this.app, this.context) - + const observer = new ShadowRootObserver(this.app) this.shadowRootObservers.push(observer) observer.observe(shRoot.host) } @@ -81,9 +81,9 @@ export default class TopObserver extends Observer { // the change in the re-player behaviour caused by CreateDocument message: // the 0-node ("fRoot") will become #document rather than documentElement as it is now. 
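// (The player is already prepared for that shape in iframes: patch 58/60
// below rebinds create_i_frame_document to the iframe's #document itself
// instead of its documentElement.)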
// Alternatively - observe(#document) then bindNode(documentElement) - this.observeRoot(this.context.document, () => { + this.observeRoot(window.document, () => { this.app.send(new CreateDocument()) - }, this.context.document.documentElement); + }, window.document.documentElement); } disconnect() { From 1799a4a5665e429a5eee221a951b1bb6c8d71732 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 18 Mar 2022 20:06:57 +0100 Subject: [PATCH 58/60] fix(frontend-player):corect iframe document init --- .../MessageDistributor/managers/DOMManager.ts | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/frontend/app/player/MessageDistributor/managers/DOMManager.ts b/frontend/app/player/MessageDistributor/managers/DOMManager.ts index 7c40a4668..685a34f2c 100644 --- a/frontend/app/player/MessageDistributor/managers/DOMManager.ts +++ b/frontend/app/player/MessageDistributor/managers/DOMManager.ts @@ -113,8 +113,15 @@ export default class DOMManager extends ListWalker { logger.error("Node has no childNodes", this.nl[ parentID ]); return; } + + if (this.nl[ id ] instanceof HTMLHtmlElement) { + // What if some exotic cases? + this.nl[ parentID ].replaceChild(this.nl[ id ], childNodes[childNodes.length-1]) + return + } + this.nl[ parentID ] - .insertBefore(this.nl[ id ], childNodes[ index ]); + .insertBefore(this.nl[ id ], childNodes[ index ]) } private applyMessage = (msg: Message): void => { @@ -257,14 +264,14 @@ export default class DOMManager extends ListWalker { case "create_i_frame_document": node = this.nl[ msg.frameID ]; // console.log('ifr', msg, node) - + if (node instanceof HTMLIFrameElement) { doc = node.contentDocument; if (!doc) { logger.warn("No iframe doc", msg, node, node.contentDocument); return; } - this.nl[ msg.id ] = doc.documentElement + this.nl[ msg.id ] = doc return; } else if (node instanceof Element) { // shadow DOM try { From 732d31684b617eb31fc8377adf3aed412f7eb7d8 Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 18 Mar 2022 20:15:15 +0100 Subject: [PATCH 59/60] feat(tracker-redux):3.5.0:clear encoder on stop --- tracker/tracker-redux/package-lock.json | 1147 ++++++++++++++++++++++- tracker/tracker-redux/package.json | 6 +- tracker/tracker-redux/src/index.ts | 5 +- tracker/tracker-redux/tsconfig.json | 2 +- 4 files changed, 1150 insertions(+), 10 deletions(-) diff --git a/tracker/tracker-redux/package-lock.json b/tracker/tracker-redux/package-lock.json index 3cb97282f..00aecf7ec 100644 --- a/tracker/tracker-redux/package-lock.json +++ b/tracker/tracker-redux/package-lock.json @@ -1,8 +1,1121 @@ { "name": "@openreplay/tracker-redux", - "version": "3.0.0", - "lockfileVersion": 1, + "version": "3.5.0", + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "@openreplay/tracker-redux", + "version": "3.5.0", + "license": "MIT", + "devDependencies": { + "@openreplay/tracker": "^3.5.0", + "prettier": "^1.18.2", + "replace-in-files-cli": "^1.0.0", + "typescript": "^4.6.0-dev.20211126" + }, + "peerDependencies": { + "@openreplay/tracker": "^3.5.0", + "redux": "^4.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.12.13" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.12.11", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", + "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "dev": true + }, + "node_modules/@babel/highlight": { + "version": "7.13.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz", + "integrity": "sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.12.11", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.17.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", + "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "peer": true, + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz", + "integrity": "sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.4", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz", + "integrity": "sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz", + "integrity": "sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.4", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@openreplay/tracker": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.5.2.tgz", + "integrity": "sha512-b0/BCFRQW4afh/k1cYhudbszmdkTQu7GBob8MYzd0vuWLMx6muXv2oSXHsyc3cro9fWrymQPeRZV3zrpNb5ioA==", + "dev": true, + "dependencies": { + "error-stack-parser": "^2.0.6" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.1.tgz", + "integrity": "sha512-fZQQafSREFyuZcdWFAExYjBiCL7AUCdgsk80iO0q4yihYYdcIiH28CcuPTGFgLOCC8RlW49GSQxdHwZP+I7CNg==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz", + "integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==", + "dev": true + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", + "integrity": "sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=", + "dev": true, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/error-stack-parser": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.6.tgz", + "integrity": "sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==", + "dev": true, + "dependencies": { + "stackframe": "^1.1.1" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fast-glob": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.5.tgz", + "integrity": "sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.0", + "merge2": "^1.3.0", + "micromatch": "^4.0.2", + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fastq": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.11.0.tgz", + "integrity": "sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globby": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.3.tgz", + "integrity": "sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.1.1", + "ignore": "^5.1.4", + "merge2": "^1.3.0", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "dev": true + }, + "node_modules/ignore": { + "version": "5.1.8", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", + "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": 
">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/map-obj": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.2.0.tgz", + "integrity": "sha512-NAq0fCmZYGz9UFEQyndp7sisrow4GroyGeKluyKC/chuITZsPyOyC1UJZPJlVFImhXdROIP5xqouRLThT3BbpQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/meow/-/meow-7.1.1.tgz", + "integrity": "sha512-GWHvA5QOcS412WCo8vwKDlTelGLsCGBVevQB5Kva961rmNfun0PCbv5+xta2kUMFJyR8/oWnn7ddeKdosbAPbA==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^2.5.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.13.1", + "yargs-parser": "^18.1.3" + }, + "engines": { + 
"node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", + "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.0.5" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/minimist-options/node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": 
"sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redux": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", + "integrity": "sha512-SH8PglcebESbd/shgf6mii6EIoRM0zrQyjcuQ+ojmfxjTtE0z9Y8pa62iA/OJ58qjP6j27uyW4kUF4jl/jd6sw==", + "peer": true, + "dependencies": { + "@babel/runtime": "^7.9.2" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "peer": true + }, + "node_modules/replace-in-files-cli": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/replace-in-files-cli/-/replace-in-files-cli-1.0.0.tgz", + "integrity": "sha512-/HMPLZeCA24CBUQ59ymHji6LyMKM+gEgDZlYsiPvXW6+3PdfOw6SsMCVd9KC2B+KlAEe/8vkJA6gfnexVdF15A==", + "dev": true, + "dependencies": { + "arrify": "^2.0.1", + "escape-string-regexp": "^4.0.0", + "globby": "^11.0.1", + "meow": "^7.1.1", + "normalize-path": "^3.0.0", + "write-file-atomic": "^3.0.0" + }, + "bin": { + "replace-in-files": "cli.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "dependencies": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, 
+ "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/signal-exit": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", + "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==", + "dev": true + }, + "node_modules/stackframe": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.2.0.tgz", + "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==", + "dev": true + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/trim-newlines": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.0.tgz", + "integrity": "sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/type-fest": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", + "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typescript": { + "version": "4.6.0-dev.20211126", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.6.0-dev.20211126.tgz", + "integrity": "sha512-m+LKstqVv6FYW363aIbO6bm8awsLbeSUCzU6FxPtzUF/WJkFieQfYmdVwEIzigeTpw4E2GETBXnk6P6AixcQJQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + } + }, "dependencies": { "@babel/code-frame": { "version": "7.12.13", @@ -30,6 +1143,15 @@ "js-tokens": "^4.0.0" } }, + "@babel/runtime": { + "version": "7.17.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", + "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "peer": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, "@nodelib/fs.scandir": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz", @@ -57,9 +1179,9 @@ } }, "@openreplay/tracker": { - 
"version": "3.4.8", - "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.4.8.tgz", - "integrity": "sha512-Qrvoa0MUzVHCfU3tl8c9e4pz5Ee59Z5TZWV4cR5f5yFMZtxUNsv5b5Q0B2DebYI/dDI1iKBscluvmQOrIaIAzw==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.5.2.tgz", + "integrity": "sha512-b0/BCFRQW4afh/k1cYhudbszmdkTQu7GBob8MYzd0vuWLMx6muXv2oSXHsyc3cro9fWrymQPeRZV3zrpNb5ioA==", "dev": true, "requires": { "error-stack-parser": "^2.0.6" @@ -623,6 +1745,21 @@ "strip-indent": "^3.0.0" } }, + "redux": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", + "integrity": "sha512-SH8PglcebESbd/shgf6mii6EIoRM0zrQyjcuQ+ojmfxjTtE0z9Y8pa62iA/OJ58qjP6j27uyW4kUF4jl/jd6sw==", + "peer": true, + "requires": { + "@babel/runtime": "^7.9.2" + } + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "peer": true + }, "replace-in-files-cli": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/replace-in-files-cli/-/replace-in-files-cli-1.0.0.tgz", diff --git a/tracker/tracker-redux/package.json b/tracker/tracker-redux/package.json index 87a365754..862d5b829 100644 --- a/tracker/tracker-redux/package.json +++ b/tracker/tracker-redux/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker-redux", "description": "Tracker plugin for Redux state recording", - "version": "3.4.8", + "version": "3.5.0", "keywords": [ "redux", "logging", @@ -23,11 +23,11 @@ }, "dependencies": {}, "peerDependencies": { - "@openreplay/tracker": "^3.4.8", + "@openreplay/tracker": "^3.5.0", "redux": "^4.0.0" }, "devDependencies": { - "@openreplay/tracker": "^3.4.8", + "@openreplay/tracker": "^3.5.0", "prettier": "^1.18.2", "replace-in-files-cli": "^1.0.0", "typescript": "^4.6.0-dev.20211126" diff --git a/tracker/tracker-redux/src/index.ts b/tracker/tracker-redux/src/index.ts index 5a4749e71..dfc092d9c 100644 --- a/tracker/tracker-redux/src/index.ts +++ b/tracker/tracker-redux/src/index.ts @@ -23,8 +23,11 @@ export default function(opts: Partial = {}) { return () => next => action => next(action); } const encoder = new Encoder(sha1, 50); + app.attachStopCallback(() => { + encoder.clear() + }) return ({ getState }) => next => action => { - if (!options.actionFilter(action)) { + if (!app.active() || !options.actionFilter(action)) { return next(action); } const startTime = performance.now(); diff --git a/tracker/tracker-redux/tsconfig.json b/tracker/tracker-redux/tsconfig.json index 0c5b8d1b3..ce07a685b 100644 --- a/tracker/tracker-redux/tsconfig.json +++ b/tracker/tracker-redux/tsconfig.json @@ -5,7 +5,7 @@ "alwaysStrict": true, "target": "es6", "module": "es6", - "moduleResolution": "nodenext", + "moduleResolution": "node", "declaration": true, "outDir": "./lib" } From f88b95b59b9301a6f2b40874a86a086eab1ed7ac Mon Sep 17 00:00:00 2001 From: ShiKhu Date: Fri, 18 Mar 2022 20:17:24 +0100 Subject: [PATCH 60/60] gitfix(tracker-profile): remove build code from index --- tracker/tracker-profiler/.gitignore | 1 + tracker/tracker-profiler/cjs/index.js | 18 ------------------ tracker/tracker-profiler/cjs/package.json | 1 - 3 files changed, 1 insertion(+), 19 deletions(-) delete mode 100644 tracker/tracker-profiler/cjs/index.js delete mode 100644 tracker/tracker-profiler/cjs/package.json diff --git a/tracker/tracker-profiler/.gitignore 
b/tracker/tracker-profiler/.gitignore index 1736ff4de..1f2395a10 100644 --- a/tracker/tracker-profiler/.gitignore +++ b/tracker/tracker-profiler/.gitignore @@ -1,4 +1,5 @@ node_modules npm-debug.log lib +cjs .cache diff --git a/tracker/tracker-profiler/cjs/index.js b/tracker/tracker-profiler/cjs/index.js deleted file mode 100644 index 6a83f0e35..000000000 --- a/tracker/tracker-profiler/cjs/index.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const tracker_1 = require("@openreplay/tracker/cjs"); -function default_1() { - return (app) => { - if (app === null) { - return (name) => (fn, thisArg) => thisArg === undefined ? fn : fn.bind(thisArg); - } - return (name) => (fn, thisArg) => (...args) => { - const startTime = performance.now(); - const result = thisArg === undefined ? fn.apply(this, args) : fn.apply(thisArg, args); - const duration = performance.now() - startTime; - app.send(tracker_1.Messages.Profiler(name, duration, args.map(String).join(', '), String(result))); - return result; - }; - }; -} -exports.default = default_1; diff --git a/tracker/tracker-profiler/cjs/package.json b/tracker/tracker-profiler/cjs/package.json deleted file mode 100644 index a3c15a7a6..000000000 --- a/tracker/tracker-profiler/cjs/package.json +++ /dev/null @@ -1 +0,0 @@ -{ "type": "commonjs" }
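
Editor's note on the tracker-redux change above: the new app.attachStopCallback(() => encoder.clear()) hook resets the SHA-1 state encoder whenever the tracker stops, and the added !app.active() guard makes the middleware pass actions straight through to next() while no session is being recorded, so nothing is encoded or sent after stop. Below is a minimal sketch of how the patched middleware is wired into a store; the project key and the TICK/INCREMENT actions are placeholders, and actionFilter is the only option exercised because it is the only one visible in this diff.

    import { createStore, applyMiddleware, AnyAction } from 'redux';
    import Tracker from '@openreplay/tracker';
    import trackerRedux from '@openreplay/tracker-redux';

    const tracker = new Tracker({ projectKey: 'YOUR_PROJECT_KEY' }); // placeholder key

    // tracker.use() invokes the plugin with the internal app instance,
    // which is where attachStopCallback() and the app.active() check hook in.
    const openReplayMiddleware = tracker.use(
      trackerRedux({
        // Actions rejected here skip encoding entirely, exactly like
        // actions dispatched once the tracker is no longer active.
        actionFilter: (action: AnyAction) => action.type !== 'TICK',
      })
    );

    const reducer = (state = { count: 0 }, action: AnyAction) =>
      action.type === 'INCREMENT' ? { count: state.count + 1 } : state;

    const store = createStore(reducer, applyMiddleware(openReplayMiddleware));

    tracker.start();
    store.dispatch({ type: 'INCREMENT' }); // encoded and sent with the session
    tracker.stop();
    store.dispatch({ type: 'INCREMENT' }); // passes through: app.active() is false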
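
For reference, the cjs/index.js deleted in this last patch is only the CommonJS build output of the profiler plugin, which is why the patch adds cjs to .gitignore instead of keeping it in the repository; the plugin's behavior is unchanged. The sketch below illustrates its curried API as it appears in the deleted build artifact — profiler(name) returns a wrapper that times each call and sends a Profiler message with the name, duration, stringified arguments, and result. The fib function and project key are illustrative only.

    import Tracker from '@openreplay/tracker';
    import trackerProfiler from '@openreplay/tracker-profiler';

    const tracker = new Tracker({ projectKey: 'YOUR_PROJECT_KEY' }); // placeholder key
    const profiler = tracker.use(trackerProfiler());

    // profiler('fib') returns (fn, thisArg?) => wrapped fn, matching the
    // (name) => (fn, thisArg) => (...args) shape in the deleted file.
    const fib = profiler('fib')(function fib(n: number): number {
      // Recursive calls bind to the named function expression itself,
      // so only the outermost call is measured and reported.
      return n < 2 ? n : fib(n - 1) + fib(n - 2);
    });

    tracker.start();
    fib(20); // emits one Profiler message for the top-level call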