Merge remote-tracking branch 'origin/dev' into api-v1.5.4

Taha Yassine Kraiem 2022-03-12 10:28:42 +01:00
commit 8c480f561b
12 changed files with 117 additions and 63 deletions

View file

@@ -5,7 +5,7 @@ OpenReplay monorepo uses multiple licenses. Portions of this software are licens
- All content that resides under the "ee/" directory of this repository, is licensed under the license defined in "ee/LICENSE".
- Content outside of the above mentioned directories or restrictions above is available under the "Elastic License 2.0 (ELv2)" license as defined below.
[Reach out](mailto:license@openreplay.com) if you have any questions regarding licenses.
Reach out (license@openreplay.com) if you have any questions regarding licenses.
------------------------------------------------------------------------------------
Elastic License 2.0 (ELv2)

View file

@@ -1,4 +1,5 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
@@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
# def filter_stages(stages):
# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
schemas.EventType.location, schemas.EventType.custom,
schemas.EventType.click_ios, schemas.EventType.input_ios,
schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
events = []
for e in f_events:
if e.operator is None:
e.operator = schemas.SearchEventOperator._is
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
return events
def __transform_old_funnels(events):
for e in events:
@@ -28,7 +55,7 @@ def __transform_old_funnels(events):
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
# filter.events = filter_stages(stages=filter.events)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
@@ -76,6 +103,8 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
@@ -102,9 +131,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
# row["filter"]["events"] = filter_stages(row["filter"]["events"])
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
@@ -168,7 +197,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
# data.events = filter_stages(data.events)
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
@@ -192,17 +222,18 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data):
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
if len(insights) > 0:
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
@@ -220,25 +251,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
@dev.timed
def get_issues_on_the_fly(funnel_id, user_id, project_id, data):
first_stage = data.get("firstStage")
last_stage = data.get("lastStage")
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
last_stage=last_stage))}
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True):
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@@ -260,7 +292,11 @@ def get(funnel_id, project_id, user_id, flatten=True):
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
# f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
@@ -279,7 +315,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
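For context on the funnels.py changes above: the newly added __fix_stages helper normalizes stage events before they reach the query builders. Below is a minimal, self-contained sketch of the same normalization, with simplified stand-ins for schemas._SessionSearchEventSchema, schemas.SearchEventOperator and sessions._isAny_opreator (the operator strings used here are assumptions):

from dataclasses import dataclass
from typing import Any, List, Optional

# Simplified stand-in for schemas._SessionSearchEventSchema
@dataclass
class Stage:
    type: str
    value: Any = None
    operator: Optional[str] = None

def fix_stages(stages: Optional[List[Stage]]) -> Optional[List[Stage]]:
    """Default the operator, force the value into a list, and drop stages
    whose value list is empty (unless the operator matches anything)."""
    if stages is None:
        return None
    fixed: List[Stage] = []
    for s in stages:
        if s.operator is None:
            s.operator = "is"                      # schemas.SearchEventOperator._is in the real code
        if not isinstance(s.value, list):
            s.value = [s.value]
        is_any = s.operator in ("isAny", "onAny")  # sessions._isAny_opreator() in the real code
        if not is_any and len(s.value) == 0:
            continue
        fixed.append(s)
    return fixed

# A stage without an operator gets the default; a stage with an empty value list is dropped.
print(fix_stages([Stage("click", "#buy"), Stage("input", [])]))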

View file

@@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
for i, s in enumerate(stages):
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
if s.get("operator") is None:
s["operator"] = "is"
@@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
is_any = sessions._isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
i += 1
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
@@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
) AS issues_t
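The significance.py hunks above replace enumerate(stages) with a manual counter that is incremented only after the skip checks, so a stage dropped by continue no longer consumes a stage number, and the closing issues query can bound on stage{i + 1}_timestamp (the last stage actually generated) rather than stage{len(stages)}_timestamp, which may not exist when stages were skipped. A rough sketch of that counting pattern, not the real query builder (operator strings and column names are simplified assumptions):

from typing import Any, Dict, List

def build_stage_columns(stages: List[Dict[str, Any]]) -> List[str]:
    """Assign contiguous stage numbers only to stages that survive filtering,
    so a skipped stage never leaves a gap such as a missing stage2_timestamp."""
    columns: List[str] = []
    i = -1  # incremented only for stages that are kept
    for s in stages:
        if s.get("operator") is None:
            s["operator"] = "is"
        value = s.get("value")
        if not isinstance(value, list):
            value = [value]
        if s["operator"] not in ("isAny", "onAny") and len(value) == 0:
            continue  # dropped stage: does NOT consume a stage number
        i += 1
        columns.append(f"T{i + 1}.timestamp AS stage{i + 1}_timestamp")
    # After the loop, i + 1 is the number of stages actually used, so
    # stage{i + 1}_timestamp is a safe upper bound for the issues query.
    return columns

print(build_stage_columns([{"value": ["#buy"]},
                           {"value": []},            # skipped
                           {"value": ["/checkout"]}]))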

View file

@@ -698,7 +698,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())
data=data)
@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@@ -713,7 +713,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())}
data=data)}
@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@@ -737,10 +737,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None:
return {"errors": ["issue not found"]}
return {
"data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
start_date=startDate,
end_date=endDate),
start_date=startDate, end_date=endDate),
"issue": issue}}

View file

@@ -662,6 +662,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
@root_validator(pre=True)
def enforce_default_values(cls, values):
@@ -694,6 +695,7 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
class MetricPayloadSchema(BaseModel):
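The schema hunks above add an optional rangeValue field to both funnel payload schemas. A minimal pydantic v1 sketch of the surrounding pattern, where constant fields are pinned with const=True and a pre root_validator injects the defaults before field validation (the literal default values here are simplified assumptions; the real code uses SearchEventOrder._then):

from typing import Optional
from pydantic import BaseModel, Field, root_validator

class FunnelPayloadSketch(BaseModel):
    order: Optional[str] = Field(None)
    events_order: Optional[str] = Field(default="then", const=True)
    group_by_user: Optional[bool] = Field(default=False, const=True)
    rangeValue: Optional[str] = Field(None)  # newly accepted, optional

    @root_validator(pre=True)
    def enforce_default_values(cls, values):
        # Overwrite whatever the client sent so the const checks always pass.
        values["events_order"] = "then"
        values["group_by_user"] = False
        return values

print(FunnelPayloadSketch(rangeValue="LAST_7_DAYS"))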

View file

@@ -211,9 +211,9 @@ function FunnelGraph(props) {
strokeWidth={0}
interval={0}
tick ={{ fill: '#666', fontSize: 12 }}
xAxisId={0}
xAxisId={0}
/>
<XAxis
{/* <XAxis
stroke={0}
xAxisId={1}
dataKey="value"
@@ -222,7 +222,7 @@
dy={-15} dx={0}
tick ={{ fill: '#666', fontSize: 12 }}
tickFormatter={val => '"' + val + '"'}
/>
/> */}
<YAxis interval={ 0 } strokeWidth={0} tick ={{ fill: '#999999', fontSize: 11 }} tickFormatter={val => Styles.tickFormatter(val)} />
</BarChart>
</div>

View file

@@ -23,11 +23,12 @@ const FunnelHeader = (props) => {
const [showSaveModal, setShowSaveModal] = useState(false)
const writeOption = (e, { name, value }) => {
props.fetch(value)
props.fetchInsights(value, {})
props.fetchIssuesFiltered(value, {})
props.fetchSessionsFiltered(value, {})
props.redirect(value)
props.fetch(value).then(() => {
props.fetchInsights(value, {})
props.fetchIssuesFiltered(value, {})
props.fetchSessionsFiltered(value, {})
props.redirect(value)
})
}
const deleteFunnel = async (e, funnel) => {

View file

@@ -60,7 +60,7 @@ function FilterAutoComplete(props: Props) {
.finally(() => setLoading(false));
}
const debouncedRequestValues = React.useCallback(debounce(requestValues, 300), []);
const debouncedRequestValues = React.useCallback(debounce(requestValues, 1000), []);
const onInputChange = ({ target: { value } }) => {
setQuery(value);

View file

@@ -8,15 +8,16 @@ import { connect } from 'react-redux';
interface Props {
clearSearch: () => void;
appliedFilter: any;
optionsReady: boolean;
}
const MainSearchBar = (props: Props) => {
const { appliedFilter } = props;
const { appliedFilter, optionsReady } = props;
const hasFilters = appliedFilter && appliedFilter.filters && appliedFilter.filters.size > 0;
return (
<div className="flex items-center">
<div style={{ width: "60%", marginRight: "10px"}}><SessionSearchField /></div>
<div className="flex items-center" style={{ width: "40%"}}>
<SavedSearch />
{optionsReady && <SavedSearch /> }
<Popup
trigger={
<Button
@@ -39,4 +40,5 @@ const MainSearchBar = (props: Props) => {
}
export default connect(state => ({
appliedFilter: state.getIn(['search', 'instance']),
optionsReady: state.getIn(['customFields', 'optionsReady'])
}), { clearSearch })(MainSearchBar);

View file

@@ -31,6 +31,7 @@ const initialState = Map({
list: List(),
instance: CustomField(),
sources: List(),
optionsReady: false
});
const reducer = (state = initialState, action = {}) => {
@@ -40,7 +41,8 @@ const reducer = (state = initialState, action = {}) => {
addElementToFiltersMap(FilterCategory.METADATA, item.key);
addElementToLiveFiltersMap(FilterCategory.METADATA, item.key);
});
return state.set('list', List(action.data).map(CustomField)) //.concat(defaultMeta))
return state.set('list', List(action.data).map(CustomField))
.set('optionsReady', true) //.concat(defaultMeta))
case FETCH_SOURCES_SUCCESS:
return state.set('sources', List(action.data.map(({ value, ...item}) => ({label: value, key: value, ...item}))).map(CustomField))
case SAVE_SUCCESS:

View file

@@ -6,6 +6,7 @@ import { capitalize } from 'App/utils';
const countryOptions = Object.keys(countries).map(i => ({ text: countries[i], value: i }));
const containsFilters = [{ key: 'contains', text: 'contains', value: 'contains' }]
export const metaFilter = { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata' };
export const filtersMap = {
// EVENTS
[FilterKey.CLICK]: { key: FilterKey.CLICK, type: FilterType.MULTIPLE, category: FilterCategory.INTERACTIONS, label: 'Click', operator: 'on', operatorOptions: filterOptions.targetOperators, icon: 'filters/click', isEvent: true },
@@ -31,7 +32,6 @@ export const filtersMap = {
[FilterKey.ERROR]: { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true },
// [FilterKey.METADATA]: { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata', isEvent: true },
// FILTERS
[FilterKey.USER_OS]: { key: FilterKey.USER_OS, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User OS', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/os' },
[FilterKey.USER_BROWSER]: { key: FilterKey.USER_BROWSER, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User Browser', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/browser' },
@@ -133,7 +133,11 @@ export default Record({
})
_filter = subFilterMap[type]
} else {
_filter = filtersMap[type];
if (type === FilterKey.METADATA) {
_filter = filtersMap[filter.source];
} else {
_filter = filtersMap[type];
}
}
return {
...filter,

View file

@@ -51,8 +51,9 @@ export default Record({
}
},
fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => {
let _stages = stages.map(stage => {
stage.label = getRedableName(stage.type, stage.value);
let _stages = stages.map((stage, index) => {
// stage.label = getRedableName(stage.type, stage.value);
stage.label = `Step ${index + 1}`;
return stage;
});
@@ -70,16 +71,19 @@ export default Record({
return {
...rest,
stages: _stages.length > 0 ? _stages.map(stage => {
stages: _stages.length > 0 ? _stages.map((stage, index) => {
if (!stage) return;
stage.label = getRedableName(stage);
// stage.label = getRedableName(stage);
stage.label = `Step ${index + 1}`;
return stage;
}) : [],
affectedUsers,
lostConversions,
conversionImpact,
firstStage: firstStage && firstStage.label + ' ' + truncate(firstStage.value || '', 10) || '',
lastStage: lastStage && lastStage.label + ' ' + truncate(lastStage.value || '', 10) || '',
// firstStage: firstStage && firstStage.label + ' ' + truncate(firstStage.value || '', 10) || '',
// lastStage: lastStage && lastStage.label + ' ' + truncate(lastStage.value || '', 10) || '',
firstStage: firstStage && firstStage.label || '',
lastStage: lastStage && lastStage.label || '',
filter: Filter(filter),
sessionsCount: lastStage && lastStage.sessionsCount,
stepsCount: stages ? stages.length : 0,