Merge remote-tracking branch 'origin/dev' into api-bookmarked-pagination
# Conflicts:
#	api/chalicelib/core/sessions.py
commit 4fba0c6506

16 changed files with 246 additions and 172 deletions
LICENSE (2 changes)

@@ -5,7 +5,7 @@ OpenReplay monorepo uses multiple licenses. Portions of this software are licens
 - All content that resides under the "ee/" directory of this repository, is licensed under the license defined in "ee/LICENSE".
 - Content outside of the above mentioned directories or restrictions above is available under the "Elastic License 2.0 (ELv2)" license as defined below.
 
-[Reach out](mailto:license@openreplay.com) if you have any questions regarding licenses.
+Reach out (license@openreplay.com) if you have any questions regarding licenses.
 
 ------------------------------------------------------------------------------------
 Elastic License 2.0 (ELv2)
@@ -1,5 +1,6 @@
 import json
 
+import schemas
 from chalicelib.core import sourcemaps, sessions
 from chalicelib.utils import pg_client, helper, dev
 from chalicelib.utils.TimeUTC import TimeUTC

@@ -405,9 +406,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
     if chart:
         ch_sub_query += [f"timestamp >= generated_timestamp",
                          f"timestamp < generated_timestamp + %({step_size_name})s"]
-    if platform == 'mobile':
+    if platform == schemas.PlatformType.mobile:
         ch_sub_query.append("user_device_type = 'mobile'")
-    elif platform == 'desktop':
+    elif platform == schemas.PlatformType.desktop:
         ch_sub_query.append("user_device_type = 'desktop'")
     return ch_sub_query
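Note: comparing against schemas.PlatformType still matches plain strings when the enum derives from str. A minimal sketch, assuming a str-based Enum (the actual schemas.PlatformType declaration is not shown in this diff):

    # Assumed shape of schemas.PlatformType, for illustration only:
    from enum import Enum

    class PlatformType(str, Enum):
        mobile = "mobile"
        desktop = "desktop"

    # Because the enum subclasses str, callers passing the raw string and
    # callers passing the enum member take the same branch:
    assert PlatformType.mobile == "mobile"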
@@ -421,23 +422,28 @@ def __get_sort_key(key):
 
 
 @dev.timed
-def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
     status = status.upper()
     if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
         return {"errors": ["invalid error status"]}
-    pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id")
+    platform = None
+    for f in data.filters:
+        if f.type == schemas.FilterType.platform and len(f.value) > 0:
+            platform = f.value[0]
+    pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
     pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                      "pe.project_id=%(project_id)s"]
-    pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True)
+    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True)
     pg_sub_query_chart.append("source ='js_exception'")
     pg_sub_query_chart.append("errors.error_id =details.error_id")
     statuses = []
     error_ids = None
-    if data.get("startDate") is None:
-        data["startDate"] = TimeUTC.now(-30)
-    if data.get("endDate") is None:
-        data["endDate"] = TimeUTC.now(1)
-    if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
+    if data.startDate is None:
+        data.startDate = TimeUTC.now(-30)
+    if data.endDate is None:
+        data.endDate = TimeUTC.now(1)
+    if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
         # if favorite_only=True search for sessions associated with favorite_error
         statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                        error_status=status)
         if len(statuses) == 0:
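Note: the recurring change in this hunk is the migration from a raw dict payload to a validated Pydantic model. A minimal sketch of the pattern with a stand-in model (not the real schema):

    from typing import Optional
    from pydantic import BaseModel

    class Payload(BaseModel):          # stand-in for schemas.SearchErrorsSchema
        startDate: Optional[int] = None
        endDate: Optional[int] = None

    data = Payload()
    if data.startDate is None:         # replaces data.get("startDate") is None
        data.startDate = 0             # TimeUTC.now(-30) in the real code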
@@ -447,28 +453,30 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
             }}
         error_ids = [e["error_id"] for e in statuses]
     with pg_client.PostgresClient() as cur:
-        if data.get("startDate") is None:
-            data["startDate"] = TimeUTC.now(-7)
-        if data.get("endDate") is None:
-            data["endDate"] = TimeUTC.now()
-        density = data.get("density", 7)
-        step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
+        if data.startDate is None:
+            data.startDate = TimeUTC.now(-7)
+        if data.endDate is None:
+            data.endDate = TimeUTC.now()
+        step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
         sort = __get_sort_key('datetime')
-        if data.get("sort") is not None:
-            sort = __get_sort_key(data["sort"])
+        if data.sort is not None:
+            sort = __get_sort_key(data.sort)
         order = "DESC"
-        if data.get("order") is not None:
-            order = data["order"]
-
+        if data.order is not None:
+            order = data.order
+        extra_join = ""
         params = {
-            "startDate": data['startDate'],
-            "endDate": data['endDate'],
+            "startDate": data.startDate,
+            "endDate": data.endDate,
             "project_id": project_id,
             "userId": user_id,
             "step_size": step_size}
         if error_ids is not None:
             params["error_ids"] = tuple(error_ids)
             pg_sub_query.append("error_id IN %(error_ids)s")
+        if favorite_only:
+            pg_sub_query.append("ufe.user_id = %(userId)s")
+            extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
         main_pg_query = f"""\
                 SELECT error_id,
                        name,
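Note: the constraint lists built here are joined into the WHERE clause and filled through psycopg2 named placeholders. An illustrative, simplified sketch (table and values are not the real query):

    pg_sub_query = ["pe.project_id = %(project_id)s", "error_id IN %(error_ids)s"]
    params = {"project_id": 1, "error_ids": (10, 20)}
    main_pg_query = f"""SELECT error_id
                        FROM public.errors AS pe
                        WHERE {" AND ".join(pg_sub_query)};"""
    # cur.mogrify(main_pg_query, params) then renders the final, safely-escaped SQL.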
@@ -488,6 +496,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
                        FROM events.errors
                                 INNER JOIN public.errors AS pe USING (error_id)
                                 INNER JOIN public.sessions USING (session_id)
+                                {extra_join}
                        WHERE {" AND ".join(pg_sub_query)}
                        GROUP BY error_id, name, message
                        ORDER BY {sort} {order}) AS details
@@ -581,7 +590,7 @@ def __save_stacktrace(error_id, data):
 
 
 def get_trace(project_id, error_id):
-    error = get(error_id=error_id)
+    error = get(error_id=error_id, family=False)
     if error is None:
         return {"errors": ["error not found"]}
     if error.get("source", "") != "js_exception":
@@ -1,4 +1,5 @@
 import json
+from typing import List
 
 import chalicelib.utils.helper
 import schemas

@@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
 ALLOW_UPDATE_FOR = ["name", "filter"]
 
 
-# def filter_stages(stages):
-#     ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
-#                    events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
-#                    events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
-#                    events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
-#     return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
+def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
+    ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
+                   schemas.EventType.location, schemas.EventType.custom,
+                   schemas.EventType.click_ios, schemas.EventType.input_ios,
+                   schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
+    return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
+
+
+def __parse_events(f_events: List[dict]):
+    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
+
+
+def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
+    return [e.dict() for e in f_events]
+
+
+def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
+    if f_events is None:
+        return
+    events = []
+    for e in f_events:
+        if e.operator is None:
+            e.operator = schemas.SearchEventOperator._is
+
+        if not isinstance(e.value, list):
+            e.value = [e.value]
+        is_any = sessions._isAny_opreator(e.operator)
+        if not is_any and isinstance(e.value, list) and len(e.value) == 0:
+            continue
+        events.append(e)
+    return events
 
 
 def __transform_old_funnels(events):
     for e in events:
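Note: a hypothetical round-trip through the new helpers (the raw event shape and values are illustrative):

    raw = [{"type": "click", "value": "#buy-button", "operator": None}]
    stages = __parse_events(raw)        # dicts -> _SessionSearchEventSchema objects
    stages = filter_stages(stages)      # drop event types funnels cannot use
    stages = __fix_stages(stages)       # default operator, wrap scalar values in lists
    stored = __unparse_events(stages)   # back to plain dicts for persistence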
@@ -28,7 +55,7 @@ def __transform_old_funnels(events):
 
 def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
     helper.delete_keys_from_dict(filter, REMOVE_KEYS)
-    # filter.events = filter_stages(stages=filter.events)
+    filter.events = filter_stages(stages=filter.events)
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""\
             INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
@@ -76,6 +103,8 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non
             query
         )
         r = cur.fetchone()
+        if r is None:
+            return {"errors": ["funnel not found"]}
         r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
         r = helper.dict_to_camel_case(r)
         r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
@@ -102,9 +131,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
         for row in rows:
             row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
             if details:
-                # row["filter"]["events"] = filter_stages(row["filter"]["events"])
+                row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
                 if row.get("filter") is not None and row["filter"].get("events") is not None:
-                    row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
+                    row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
 
                 get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
                                    end_date=end_date)
@@ -168,7 +197,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
 
 
 def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
-    # data.events = filter_stages(data.events)
+    data.events = filter_stages(data.events)
+    data.events = __fix_stages(data.events)
     if len(data.events) == 0:
         f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
         if f is None:
@@ -192,17 +222,18 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
                      "totalDropDueToIssues": total_drop_due_to_issues}}
 
 
-def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data):
-    # data["events"] = filter_stages(data.get("events", []))
-    if len(data["events"]) == 0:
+def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
+    data.events = filter_stages(__parse_events(data.events))
+    if len(data.events) == 0:
         f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
         if f is None:
             return {"errors": ["funnel not found"]}
-        get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
-                           start_date=data.get('startDate', None),
-                           end_date=data.get('endDate', None))
-        data = f["filter"]
-    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
+        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+                           start_date=data.startDate,
+                           end_date=data.endDate)
+        data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
+    data.events = __fix_stages(data.events)
+    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
     if len(insights) > 0:
         insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
     return {"data": {"stages": helper.list_to_camel_case(insights),
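Note: the pattern above, re-validating a stored filter dict through the request schema before reuse, in a short sketch (the stored shape is illustrative):

    stored_filter = {"events": [], "filters": [], "rangeValue": "LAST_7_DAYS"}
    data = schemas.FunnelInsightsPayloadSchema.parse_obj(stored_filter)
    data.events = __fix_stages(data.events)
    # significance.get_top_insights(...) then receives data.dict(), a plain dict again.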
@@ -220,25 +251,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
 
 
 @dev.timed
-def get_issues_on_the_fly(funnel_id, user_id, project_id, data):
-    first_stage = data.get("firstStage")
-    last_stage = data.get("lastStage")
-    # data["events"] = filter_stages(data.get("events", []))
-    if len(data["events"]) == 0:
+def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
+    data.events = filter_stages(data.events)
+    data.events = __fix_stages(data.events)
+    if len(data.events) == 0:
         f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
         if f is None:
             return {"errors": ["funnel not found"]}
-        get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
-                           start_date=data.get('startDate', None),
-                           end_date=data.get('endDate', None))
-        data = f["filter"]
+        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+                           start_date=data.startDate,
+                           end_date=data.endDate)
+        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
+    if len(data.events) < 2:
+        return {"issues": []}
     return {
         "issues": helper.dict_to_camel_case(
-            significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
-                                         last_stage=last_stage))}
+            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
+                                         last_stage=len(data.events)))}
 
 
-def get(funnel_id, project_id, user_id, flatten=True):
+def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(
@@ -260,7 +292,11 @@ def get(funnel_id, project_id, user_id, flatten=True):
         if f.get("filter") is not None and f["filter"].get("events") is not None:
             f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
         f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
-        # f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
+        f["filter"]["events"] = __parse_events(f["filter"]["events"])
+        f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
+        if fix_stages:
+            f["filter"]["events"] = __fix_stages(f["filter"]["events"])
+        f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
         if flatten:
             f["filter"] = helper.old_search_payload_to_flat(f["filter"])
         return f
@@ -279,7 +315,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
                            end_date=data.endDate)
         data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
 
-    issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \
+    issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
         .get("issues", {})
    issues = issues.get("significant", []) + issues.get("insignificant", [])
    issue = None
@@ -228,14 +228,15 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
                 ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
                                 full_args)
 
-        print("--------------------")
-        print(main_query)
-        print("--------------------")
-        cur.execute(main_query)
-
-        if count_only:
-            return helper.dict_to_camel_case(cur.fetchone())
+        # print("--------------------")
+        # print(main_query)
+        # print("--------------------")
+
+        cur.execute(main_query)
+        sessions = cur.fetchone()
+        if count_only:
+            return helper.dict_to_camel_case(sessions)
 
-        sessions = cur.fetchone()
         total = sessions["count"]
         sessions = sessions["sessions"]
         # sessions = []
@@ -281,7 +282,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
         data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
                                                               operator=schemas.SearchEventOperator._is))
     full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
-                                                     issue=None, project_id=project_id,
+                                                     favorite_only=False, issue=None, project_id=project_id,
                                                      user_id=None, extra_event=extra_event)
     full_args["step_size"] = step_size
     sessions = []
@@ -365,7 +366,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     return sessions
 
 
-def search_query_parts(data, error_status, errors_only, issue, project_id, user_id, extra_event=None):
+def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
                  "projectId": project_id, "userId": user_id}
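Note: call shape after the signature change, for reference (argument values illustrative):

    full_args, query_part, sort = search_query_parts(
        data=data, error_status=None, errors_only=False,
        favorite_only=False, issue=None, project_id=project_id,
        user_id=None, extra_event=None)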
@@ -375,7 +376,7 @@ def search_query_parts(data, error_status, errors_only, issue, project_id, user_
     ]
     extra_from = ""
     fav_only_join = ""
-    if data.bookmarked and not errors_only:
+    if favorite_only and not errors_only:
         fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
         # extra_constraints.append("fs.user_id = %(userId)s")
     events_query_part = ""
@@ -969,9 +970,9 @@ def search_query_parts(data, error_status, errors_only, issue, project_id, user_
         if error_status != "ALL":
             extra_constraints.append("ser.status = %(error_status)s")
             full_args["status"] = error_status.lower()
-        if data.bookmarked:
+        if favorite_only:
             extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
-            extra_constraints.append("ufe.user_id = %(user_id)s")
+            extra_constraints.append("ufe.user_id = %(userId)s")
     # extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
     if data.bookmarked and not errors_only and user_id is not None:
         extra_from += """INNER JOIN (SELECT user_id, session_id
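Note: the %(user_id)s to %(userId)s renames in this and the following hunks keep placeholders aligned with the params dict keys; psycopg2 resolves named placeholders against those keys, so they must match exactly. A minimal sketch:

    cur.execute("SELECT %(userId)s AS uid;", {"userId": 42})       # works
    # cur.execute("SELECT %(userId)s AS uid;", {"user_id": 42})    # KeyError at execute time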
@@ -1199,11 +1200,11 @@ def get_session_user(project_id, user_id):
                 "public".sessions
             WHERE
                 project_id = %(project_id)s
-                AND user_id = %(user_id)s
+                AND user_id = %(userId)s
                 AND duration is not null
             GROUP BY user_id;
             """,
-            {"project_id": project_id, "user_id": user_id}
+            {"project_id": project_id, "userId": user_id}
         )
         cur.execute(query=query)
         data = cur.fetchone()
@@ -1216,8 +1217,8 @@ def get_session_ids_by_user_ids(project_id, user_ids):
             """\
             SELECT session_id FROM public.sessions
             WHERE
-                project_id = %(project_id)s AND user_id IN %(user_id)s;""",
-            {"project_id": project_id, "user_id": tuple(user_ids)}
+                project_id = %(project_id)s AND user_id IN %(userId)s;""",
+            {"project_id": project_id, "userId": tuple(user_ids)}
         )
         ids = cur.execute(query=query)
         return ids
@@ -1243,8 +1244,8 @@ def delete_sessions_by_user_ids(project_id, user_ids):
             """\
             DELETE FROM public.sessions
             WHERE
-                project_id = %(project_id)s AND user_id IN %(user_id)s;""",
-            {"project_id": project_id, "user_id": tuple(user_ids)}
+                project_id = %(project_id)s AND user_id IN %(userId)s;""",
+            {"project_id": project_id, "userId": tuple(user_ids)}
         )
         cur.execute(query=query)
@@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             first_stage_extra_constraints.append(
                 sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
             # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
-    for i, s in enumerate(stages):
-        if i == 0:
-            extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
-        else:
-            extra_from = []
+    i = -1
+    for s in stages:
         if s.get("operator") is None:
             s["operator"] = "is"
@@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
         is_any = sessions._isAny_opreator(s["operator"])
         if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
             continue
+        i += 1
+        if i == 0:
+            extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
+        else:
+            extra_from = []
         op = sessions.__get_sql_operator(s["operator"])
         event_type = s["type"].upper()
         if event_type == events.event_type.CLICK.ui_type:
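Note: why enumerate() was dropped here: with continue-based skipping, a manual counter advances only for stages that survive validation, so stage numbering stays contiguous. A minimal sketch of the pattern:

    stages = ["click", "", "location"]   # "" stands in for an invalid stage
    i = -1
    for s in stages:
        if not s:
            continue                     # skipped stages no longer consume an index
        i += 1                           # i == 0 now marks the first *valid* stage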
@@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
                                             ISS.issue_id as issue_id
                                      FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
                                      WHERE ISE.timestamp >= stages_t.stage1_timestamp
-                                       AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
+                                       AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
                                        AND ISS.project_id=%(project_id)s
                                        {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
                  ) AS issues_t
@@ -716,7 +716,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s
 def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
                                    context: schemas.CurrentContext = Depends(OR_context)):
     return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
-                                               data=data.dict())
+                                               data=data)
 
 
 @app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@@ -731,7 +731,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat
 def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
                                  context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
-                                                  data=data.dict())}
+                                                  data=data)}
 
 
 @app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@@ -755,10 +755,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
 def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
                        context: schemas.CurrentContext = Depends(OR_context)):
     issue = issues.get(project_id=projectId, issue_id=issueId)
+    if issue is None:
+        return {"errors": ["issue not found"]}
     return {
         "data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
-                                                      start_date=startDate,
-                                                      end_date=endDate),
+                                                      start_date=startDate, end_date=endDate),
                  "issue": issue}}
@@ -906,8 +907,7 @@ def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool
                   context: schemas.CurrentContext = Depends(OR_context)):
     if isinstance(favorite, str):
         favorite = True if len(favorite) == 0 else False
-    return errors.search(data.dict(), projectId, user_id=context.user_id, status=status,
-                         favorite_only=favorite)
+    return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite)
 
 
 @app.get('/{projectId}/errors/stats', tags=['errors'])
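Note: a quirk this hunk preserves: a bare ?favorite query parameter arrives as an empty string and is coerced to True. Sketched:

    favorite = ""                        # e.g. /errors/search?favorite
    if isinstance(favorite, str):
        favorite = True if len(favorite) == 0 else False
    assert favorite is True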
@@ -83,15 +83,6 @@ class EditSlackSchema(BaseModel):
     url: HttpUrl = Field(...)
 
 
-class SearchErrorsSchema(BaseModel):
-    platform: Optional[str] = Field(None)
-    startDate: Optional[int] = Field(TimeUTC.now(-7))
-    endDate: Optional[int] = Field(TimeUTC.now())
-    density: Optional[int] = Field(7)
-    sort: Optional[str] = Field(None)
-    order: Optional[str] = Field(None)
-
-
 class CreateNotificationSchema(BaseModel):
     token: str = Field(...)
     notifications: List = Field(...)
@@ -663,6 +654,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
     order: Optional[str] = Field(None)
     events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
     group_by_user: Optional[bool] = Field(default=False, const=True)
+    rangeValue: Optional[str] = Field(None)
 
     @root_validator(pre=True)
     def enforce_default_values(cls, values):
@@ -695,6 +687,11 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
     order: Optional[str] = Field(None)
     events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
     group_by_user: Optional[bool] = Field(default=False, const=True)
+    rangeValue: Optional[str] = Field(None)
+
+
+class SearchErrorsSchema(SessionsSearchPayloadSchema):
+    density: Optional[int] = Field(7)
 
 
 class MetricPayloadSchema(BaseModel):
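Note: with SearchErrorsSchema now extending SessionsSearchPayloadSchema, an errors search accepts the full session-search body plus density. An illustrative payload (any required fields of the parent schema are omitted here):

    body = {"events": [], "filters": [], "density": 14}
    data = schemas.SearchErrorsSchema.parse_obj(body)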
@@ -1,5 +1,6 @@
 import json
 
+import schemas
 from chalicelib.core import dashboard
 from chalicelib.core import sourcemaps, sessions
 from chalicelib.utils import ch_client
@@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data):
                         COALESCE((SELECT TRUE
                                   FROM public.user_favorite_errors AS fe
                                   WHERE pe.error_id = fe.error_id
-                                    AND fe.user_id = %(user_id)s), FALSE) AS favorite,
+                                    AND fe.user_id = %(userId)s), FALSE) AS favorite,
                         True AS viewed
                 FROM public.errors AS pe
                          INNER JOIN events.errors AS ee USING (error_id)
@@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data):
                   AND error_id = %(error_id)s
                 ORDER BY start_ts DESC
                 LIMIT 1;""",
-            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
+            {"project_id": project_id, "error_id": error_id, "userId": user_id})
         cur.execute(query=query)
         status = cur.fetchone()
@@ -443,54 +444,74 @@ def __get_sort_key(key):
     }.get(key, 'max_datetime')
 
 
-def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
+    empty_response = {"data": {
+        'total': 0,
+        'errors': []
+    }}
     status = status.upper()
     if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
         return {"errors": ["invalid error status"]}
-    ch_sub_query = __get_basic_constraints(data.get('platform'))
+    platform = None
+    for f in data.filters:
+        if f.type == schemas.FilterType.platform and len(f.value) > 0:
+            platform = f.value[0]
+    ch_sub_query = __get_basic_constraints(platform)
     ch_sub_query.append("source ='js_exception'")
     statuses = []
     error_ids = None
-    if data.get("startDate") is None:
-        data["startDate"] = TimeUTC.now(-30)
-    if data.get("endDate") is None:
-        data["endDate"] = TimeUTC.now(1)
-    if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
+    if data.startDate is None:
+        data.startDate = TimeUTC.now(-30)
+    if data.endDate is None:
+        data.endDate = TimeUTC.now(1)
+    if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
         # if favorite_only=True search for sessions associated with favorite_error
         statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                        error_status=status, favorite_only=favorite_only)
-        error_ids = [e["error_id"] for e in statuses]
         if len(statuses) == 0:
-            return {"data": {
-                'total': 0,
-                'errors': []
-            }}
-    with ch_client.ClickHouseClient() as ch:
-        if data.get("startDate") is None:
-            data["startDate"] = TimeUTC.now(-7)
-        if data.get("endDate") is None:
-            data["endDate"] = TimeUTC.now()
-        density = data.get("density", 7)
-        step_size = __get_step_size(data["startDate"], data["endDate"], density)
+            return empty_response
+        error_ids = [e["error_id"] for e in statuses]
+    with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
+        if data.startDate is None:
+            data.startDate = TimeUTC.now(-7)
+        if data.endDate is None:
+            data.endDate = TimeUTC.now()
+        step_size = __get_step_size(data.startDate, data.endDate, data.density)
         sort = __get_sort_key('datetime')
-        if data.get("sort") is not None:
-            sort = __get_sort_key(data["sort"])
+        if data.sort is not None:
+            sort = __get_sort_key(data.sort)
         order = "DESC"
-        if data.get("order") is not None:
-            order = data["order"]
+        if data.order is not None:
+            order = data.order
         extra_join = ""
         params = {
-            "startDate": data['startDate'],
-            "endDate": data['endDate'],
+            "startDate": data.startDate,
+            "endDate": data.endDate,
             "project_id": project_id,
             "userId": user_id,
             "step_size": step_size}
+        if favorite_only:
+            cur.execute(cur.mogrify(f"""SELECT error_id
+                                        FROM public.user_favorite_errors
+                                        WHERE user_id = %(userId)s
+                                        {"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
+                                    {"userId": user_id, "error_ids": tuple(error_ids or [])}))
+            error_ids = cur.fetchall()
+            if len(error_ids) == 0:
+                return empty_response
+            error_ids = [e["error_id"] for e in error_ids]
+
         if error_ids is not None:
             params["error_ids"] = tuple(error_ids)
             ch_sub_query.append("error_id IN %(error_ids)s")
 
         main_ch_query = f"""\
         SELECT COUNT(DISTINCT error_id) AS count
         FROM errors
         WHERE {" AND ".join(ch_sub_query)};"""
         # print("------------")
         # print(ch.client().substitute_params(main_ch_query, params))
         # print("------------")
         total = ch.execute(query=main_ch_query, params=params)[0]["count"]
         if flows:
             return {"data": {"count": total}}
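Note: the new dual-context with statement reflects that favorites live in Postgres while error events live in ClickHouse. Sketched with trivial queries:

    with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
        total = ch.execute(query="SELECT 1 AS count;", params={})[0]["count"]
        cur.execute("SELECT 1;")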
@@ -510,7 +531,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
              WHERE {" AND ".join(ch_sub_query)}
              GROUP BY error_id, name, message
              ORDER BY {sort} {order}
-             LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
+             LIMIT 200) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
                                                FROM errors
                                                GROUP BY error_id) AS time_details
              ON details.error_id=time_details.error_id
@@ -527,23 +548,22 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
         # print(main_ch_query % params)
         rows = ch.execute(query=main_ch_query, params=params)
-        if len(statuses) == 0:
-            with pg_client.PostgresClient() as cur:
-                query = cur.mogrify(
-                    """SELECT error_id, status, parent_error_id, payload,
-                              COALESCE((SELECT TRUE
-                                        FROM public.user_favorite_errors AS fe
-                                        WHERE errors.error_id = fe.error_id
-                                          AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
-                              COALESCE((SELECT TRUE
-                                        FROM public.user_viewed_errors AS ve
-                                        WHERE errors.error_id = ve.error_id
-                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
-                       FROM public.errors
-                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
-                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
-                     "user_id": user_id})
-                cur.execute(query=query)
-                statuses = cur.fetchall()
+        query = cur.mogrify(
+            """SELECT error_id, status, parent_error_id, payload,
+                      COALESCE((SELECT TRUE
+                                FROM public.user_favorite_errors AS fe
+                                WHERE errors.error_id = fe.error_id
+                                  AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
+                      COALESCE((SELECT TRUE
+                                FROM public.user_viewed_errors AS ve
+                                WHERE errors.error_id = ve.error_id
+                                  AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
+               FROM public.errors
+               WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
+            {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
+             "userId": user_id})
+        cur.execute(query=query)
+        statuses = cur.fetchall()
         statuses = {
             s["error_id"]: s for s in statuses
         }
@@ -565,9 +585,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
             r["chart"] = list(r["chart"])
             for i in range(len(r["chart"])):
                 r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
-            r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"],
-                                                            end_time=data["endDate"],
-                                                            density=density, neutral={"count": 0})
+            r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
+                                                            end_time=data.endDate,
+                                                            density=data.density, neutral={"count": 0})
         offset = len(rows)
         rows = [r for r in rows if r["stack"] is None
                 or (len(r["stack"]) == 0 or len(r["stack"]) > 1
@@ -593,7 +613,7 @@ def __save_stacktrace(error_id, data):
 
 
 def get_trace(project_id, error_id):
-    error = get(error_id=error_id)
+    error = get(error_id=error_id, family=False)
     if error is None:
         return {"errors": ["error not found"]}
     if error.get("source", "") != "js_exception":
@@ -766,7 +786,7 @@ def format_first_stack_frame(error):
 def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
+            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s)
                SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
                FROM (SELECT root_error.error_id
                      FROM events.errors
@@ -780,7 +800,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim
                        AND user_viewed.error_id ISNULL
                      LIMIT 1
                     ) AS timed_errors;""",
-            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
+            {"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp})
         cur.execute(query=query)
         row = cur.fetchone()
@@ -55,7 +55,7 @@ const socketsListByProject = function (req, res) {
     if (process.env.uws !== "true") {
         res.statusCode = 200;
         res.setHeader('Content-Type', 'application/json');
-        res.end(JSON.stringify());
+        res.end(JSON.stringify(result));
     } else {
         res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
     }
@@ -213,7 +213,7 @@ function FunnelGraph(props) {
           tick ={{ fill: '#666', fontSize: 12 }}
           xAxisId={0}
         />
-        <XAxis
+        {/* <XAxis
           stroke={0}
           xAxisId={1}
           dataKey="value"
@@ -222,7 +222,7 @@ function FunnelGraph(props) {
           dy={-15} dx={0}
           tick ={{ fill: '#666', fontSize: 12 }}
           tickFormatter={val => '"' + val + '"'}
-        />
+        /> */}
         <YAxis interval={ 0 } strokeWidth={0} tick ={{ fill: '#999999', fontSize: 11 }} tickFormatter={val => Styles.tickFormatter(val)} />
       </BarChart>
     </div>
@@ -1,7 +1,7 @@
 import React, { useEffect, useState } from 'react';
 import { Icon, BackLink, IconButton, Dropdown, Popup, TextEllipsis, Button } from 'UI';
 import { remove as deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered } from 'Duck/funnels';
-import { editFilter, addFilter } from 'Duck/funnels';
+import { editFilter, refresh, addFilter } from 'Duck/funnels';
 import DateRange from 'Shared/DateRange';
 import { connect } from 'react-redux';
 import { confirm } from 'UI/Confirmation';
@@ -19,15 +19,11 @@ const Info = ({ label = '', value = '', className = 'mx-4' }) => {
 
 const FunnelHeader = (props) => {
   const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props;
 
   const [showSaveModal, setShowSaveModal] = useState(false)
 
   const writeOption = (e, { name, value }) => {
-    props.fetch(value)
-    props.fetchInsights(value, {})
-    props.fetchIssuesFiltered(value, {})
-    props.fetchSessionsFiltered(value, {})
-    props.redirect(value)
+    props.fetch(value).then(() => props.refresh(value))
   }
 
   const deleteFunnel = async (e, funnel) => {
@@ -44,11 +40,12 @@ const FunnelHeader = (props) => {
   }
 
   const onDateChange = (e) => {
-    props.editFilter(e, funnel.funnelId);
+    props.editFilter(e, funnelId);
   }
 
   const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS();
   const selectedFunnel = funnels.filter(i => i.funnelId === parseInt(funnelId)).first() || {};
+  const eventsCount = funnel.filter.filters.filter(i => i.isEvent).size;
 
   return (
     <div>
@@ -75,7 +72,7 @@ const FunnelHeader = (props) => {
             selectOnBlur={false}
             icon={ <Icon name="chevron-down" color="gray-dark" size="14" className={stl.dropdownIcon} /> }
           />
-          <Info label="Events" value={funnel.filter.filters.size} />
+          <Info label="Events" value={eventsCount} />
           <span>-</span>
           <Button plain onClick={props.toggleFilters}>{ showFilters ? 'HIDE' : 'EDIT FUNNEL' }</Button>
           <Info label="Sessions" value={insights.sessionsCount} />
@@ -113,4 +110,4 @@ const FunnelHeader = (props) => {
 
 export default connect(state => ({
   funnel: state.getIn([ 'funnels', 'instance' ]),
-}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered })(FunnelHeader)
+}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader)
@@ -60,7 +60,7 @@ function FilterAutoComplete(props: Props) {
       .finally(() => setLoading(false));
   }
 
-  const debouncedRequestValues = React.useCallback(debounce(requestValues, 300), []);
+  const debouncedRequestValues = React.useCallback(debounce(requestValues, 1000), []);
 
   const onInputChange = ({ target: { value } }) => {
     setQuery(value);
@@ -8,15 +8,16 @@ import { connect } from 'react-redux';
 interface Props {
   clearSearch: () => void;
   appliedFilter: any;
+  optionsReady: boolean;
 }
 const MainSearchBar = (props: Props) => {
-  const { appliedFilter } = props;
+  const { appliedFilter, optionsReady } = props;
   const hasFilters = appliedFilter && appliedFilter.filters && appliedFilter.filters.size > 0;
   return (
     <div className="flex items-center">
       <div style={{ width: "60%", marginRight: "10px"}}><SessionSearchField /></div>
       <div className="flex items-center" style={{ width: "40%"}}>
-        <SavedSearch />
+        {optionsReady && <SavedSearch /> }
         <Popup
           trigger={
             <Button
@@ -39,4 +40,5 @@ const MainSearchBar = (props: Props) => {
 }
 export default connect(state => ({
   appliedFilter: state.getIn(['search', 'instance']),
+  optionsReady: state.getIn(['customFields', 'optionsReady'])
 }), { clearSearch })(MainSearchBar);
@@ -31,6 +31,7 @@ const initialState = Map({
   list: List(),
   instance: CustomField(),
   sources: List(),
+  optionsReady: false
 });
 
 const reducer = (state = initialState, action = {}) => {
@@ -40,7 +41,8 @@ const reducer = (state = initialState, action = {}) => {
         addElementToFiltersMap(FilterCategory.METADATA, item.key);
         addElementToLiveFiltersMap(FilterCategory.METADATA, item.key);
       });
-      return state.set('list', List(action.data).map(CustomField)) //.concat(defaultMeta))
+      return state.set('list', List(action.data).map(CustomField))
+        .set('optionsReady', true) //.concat(defaultMeta))
     case FETCH_SOURCES_SUCCESS:
       return state.set('sources', List(action.data.map(({ value, ...item}) => ({label: value, key: value, ...item}))).map(CustomField))
     case SAVE_SUCCESS:
@@ -6,6 +6,7 @@ import { capitalize } from 'App/utils';
 const countryOptions = Object.keys(countries).map(i => ({ text: countries[i], value: i }));
 const containsFilters = [{ key: 'contains', text: 'contains', value: 'contains' }]
 
+export const metaFilter = { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata' };
 export const filtersMap = {
   // EVENTS
   [FilterKey.CLICK]: { key: FilterKey.CLICK, type: FilterType.MULTIPLE, category: FilterCategory.INTERACTIONS, label: 'Click', operator: 'on', operatorOptions: filterOptions.targetOperators, icon: 'filters/click', isEvent: true },
@@ -31,7 +32,6 @@ export const filtersMap = {
   [FilterKey.ERROR]: { key: FilterKey.ERROR, type: FilterType.MULTIPLE, category: FilterCategory.JAVASCRIPT, label: 'Error', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/error', isEvent: true },
-  // [FilterKey.METADATA]: { key: FilterKey.METADATA, type: FilterType.MULTIPLE, category: FilterCategory.METADATA, label: 'Metadata', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/metadata', isEvent: true },
 
 
   // FILTERS
   [FilterKey.USER_OS]: { key: FilterKey.USER_OS, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User OS', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/os' },
   [FilterKey.USER_BROWSER]: { key: FilterKey.USER_BROWSER, type: FilterType.MULTIPLE, category: FilterCategory.GEAR, label: 'User Browser', operator: 'is', operatorOptions: filterOptions.stringOperators, icon: 'filters/browser' },
@@ -133,7 +133,11 @@ export default Record({
       })
       _filter = subFilterMap[type]
     } else {
-      _filter = filtersMap[type];
+      if (type === FilterKey.METADATA) {
+        _filter = filtersMap[filter.source];
+      } else {
+        _filter = filtersMap[type];
+      }
     }
     return {
       ...filter,
@@ -51,8 +51,9 @@ export default Record({
     }
   },
   fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => {
-    let _stages = stages.map(stage => {
-      stage.label = getRedableName(stage.type, stage.value);
+    let _stages = stages.map((stage, index) => {
+      // stage.label = getRedableName(stage.type, stage.value);
+      stage.label = `Step ${index + 1}`;
       return stage;
     });
@@ -70,16 +71,19 @@ export default Record({
 
     return {
       ...rest,
-      stages: _stages.length > 0 ? _stages.map(stage => {
+      stages: _stages.length > 0 ? _stages.map((stage, index) => {
         if (!stage) return;
-        stage.label = getRedableName(stage);
+        // stage.label = getRedableName(stage);
+        stage.label = `Step ${index + 1}`;
         return stage;
       }) : [],
       affectedUsers,
       lostConversions,
       conversionImpact,
-      firstStage: firstStage && firstStage.label + ' ' + truncate(firstStage.value || '', 10) || '',
-      lastStage: lastStage && lastStage.label + ' ' + truncate(lastStage.value || '', 10) || '',
+      // firstStage: firstStage && firstStage.label + ' ' + truncate(firstStage.value || '', 10) || '',
+      // lastStage: lastStage && lastStage.label + ' ' + truncate(lastStage.value || '', 10) || '',
+      firstStage: firstStage && firstStage.label || '',
+      lastStage: lastStage && lastStage.label || '',
       filter: Filter(filter),
       sessionsCount: lastStage && lastStage.sessionsCount,
      stepsCount: stages ? stages.length : 0,