Merge pull request #364 from openreplay/dev

Chore(release): v1.5.3
Kraiem Taha Yassine 2022-03-08 21:40:20 +01:00 committed by GitHub
commit e2b1eb10b5
5 changed files with 23 additions and 7 deletions

View file

@@ -20,8 +20,8 @@ def __try_live(project_id, data: schemas.CreateCustomMetricsSchema):
         if data.view_type == schemas.MetricTimeseriesViewType.progress:
             r = {"count": results[-1]}
             diff = s.filter.endDate - s.filter.startDate
-            s.filter.startDate = data.endDate
-            s.filter.endDate = data.endDate - diff
+            s.filter.endDate = s.filter.startDate
+            s.filter.startDate = s.filter.endDate - diff
             r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                          view_type=data.view_type, metric_type=data.metric_type,
                                                          metric_of=data.metric_of, metric_value=data.metric_value)
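
The two swapped assignments change how the comparison window for a "progress" time-series metric is computed: the previous window now ends where the current one starts and keeps the same length, instead of being anchored to data.endDate. A minimal sketch of that date arithmetic (not the project's code; the epoch-millisecond timestamps are hypothetical):

def previous_window(start_date, end_date):
    # the previous period ends where the current one starts and spans the same duration
    diff = end_date - start_date
    prev_end = start_date
    prev_start = prev_end - diff
    return prev_start, prev_end

# a hypothetical 24-hour current window, in epoch milliseconds
print(previous_window(1646694000000, 1646780400000))
# -> (1646607600000, 1646694000000), i.e. the 24 hours immediately before it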

View file

@@ -19,6 +19,12 @@ ALLOW_UPDATE_FOR = ["name", "filter"]
 # events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
 #     return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
+def __transform_old_funnels(events):
+    for e in events:
+        if not isinstance(e.get("value"), list):
+            e["value"] = [e["value"]]
+    return events
 def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
     helper.delete_keys_from_dict(filter, REMOVE_KEYS)
@@ -97,6 +103,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
         row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
         if details:
             # row["filter"]["events"] = filter_stages(row["filter"]["events"])
+            if row.get("filter") is not None and row["filter"].get("events") is not None:
+                row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
             get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
                                end_date=end_date)
             counts = sessions.search2_pg(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
@@ -248,7 +257,8 @@ def get(funnel_id, project_id, user_id, flatten=True):
         f = helper.dict_to_camel_case(cur.fetchone())
         if f is None:
             return None
+        if f.get("filter") is not None and f["filter"].get("events") is not None:
+            f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
         f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
         # f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
         if flatten:
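
Taken together, the changes to this file add __transform_old_funnels and apply it in get_by_user and get, so funnels saved by older versions, whose stage "value" was stored as a scalar, are normalized to the list form the current search payload expects. A small self-contained illustration of the effect, using made-up stage payloads (the event types and values are examples, not taken from the codebase):

def transform_old_funnels(events):
    # wrap any scalar "value" in a list; values that are already lists pass through untouched
    for e in events:
        if not isinstance(e.get("value"), list):
            e["value"] = [e["value"]]
    return events

legacy = [{"type": "CLICK", "value": "Sign up"},         # old scalar form
          {"type": "LOCATION", "value": ["/checkout"]}]  # already a list
print(transform_old_funnels(legacy))
# [{'type': 'CLICK', 'value': ['Sign up']}, {'type': 'LOCATION', 'value': ['/checkout']}]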

View file

@@ -827,6 +827,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
                 is_any = _isAny_opreator(f.operator)
                 if is_any or len(f.value) == 0:
                     continue
+                f.value = helper.values_for_operator(value=f.value, op=f.operator)
                 op = __get_sql_operator(f.operator)
                 e_k_f = e_k + f"_fetch{j}"
                 full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
@@ -837,7 +838,8 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
                     apply = True
                 elif f.type == schemas.FetchFilterType._status_code:
                     event_where.append(
-                        _multiple_conditions(f"main.status_code {op} %({e_k_f})s", f.value, value_key=e_k_f))
+                        _multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s", f.value,
+                                             value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._method:
                     event_where.append(
@@ -845,7 +847,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
                     apply = True
                 elif f.type == schemas.FetchFilterType._duration:
                     event_where.append(
-                        _multiple_conditions(f"main.duration {op} %({e_k_f})s", f.value, value_key=e_k_f))
+                        _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value, value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._request_body:
                     event_where.append(
@@ -865,6 +867,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
                 is_any = _isAny_opreator(f.operator)
                 if is_any or len(f.value) == 0:
                     continue
+                f.value = helper.values_for_operator(value=f.value, op=f.operator)
                 op = __get_sql_operator(f.operator)
                 e_k_f = e_k + f"_graphql{j}"
                 full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
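
Two things change in this file: the fetch and graphql filter branches now pass f.value through helper.values_for_operator before building the query arguments (judging by its name, it adapts the raw values to the chosen operator), and the numeric status_code and duration comparisons interpolate f.operator directly instead of the text operator returned by __get_sql_operator, presumably because for these fields the operator already is a SQL comparison symbol. The snippet below is a hypothetical, simplified stand-in for the _multiple_values / _multiple_conditions helpers used above; it only illustrates how one filter with several values becomes an OR-ed, parameterized condition:

def multiple_values(values, value_key):
    # one bind parameter per value, e.g. {"e0_fetch0_0": 200, "e0_fetch0_1": 404}
    return {f"{value_key}_{i}": v for i, v in enumerate(values)}

def multiple_conditions(condition, values, value_key):
    # repeat the condition once per value and OR the copies together
    parts = [condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
             for i in range(len(values))]
    return "(" + " OR ".join(parts) + ")"

e_k_f = "e0_fetch0"
status_values = [200, 404]
print(multiple_conditions(f"main.status_code = %({e_k_f})s", status_values, value_key=e_k_f))
# (main.status_code = %(e0_fetch0_0)s OR main.status_code = %(e0_fetch0_1)s)
print(multiple_values(status_values, value_key=e_k_f))
# {'e0_fetch0_0': 200, 'e0_fetch0_1': 404}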

View file

@@ -545,7 +545,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
 @dev.timed
 def get_top_insights(filter_d, project_id):
     output = []
-    stages = filter_d["events"]
+    stages = filter_d.get("events", [])
     # TODO: handle 1 stage alone
     if len(stages) == 0:
         print("no stages found")

View file

@@ -103,13 +103,16 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
 @app.get('/{projectId}/events/search', tags=["events"])
 def events_search(projectId: int, q: str,
                   type: Union[schemas.FilterType, schemas.EventType,
-                              schemas.PerformanceEventType, schemas.FetchFilterType] = None,
+                              schemas.PerformanceEventType, schemas.FetchFilterType,
+                              schemas.GraphqlFilterType] = None,
                   key: str = None,
                   source: str = None, context: schemas.CurrentContext = Depends(OR_context)):
     if len(q) == 0:
         return {"data": []}
     if type in [schemas.FetchFilterType._url]:
         type = schemas.EventType.request
+    elif type in [schemas.GraphqlFilterType._name]:
+        type = schemas.EventType.graphql
     elif isinstance(type, schemas.PerformanceEventType):
         if type in [schemas.PerformanceEventType.location_dom_complete,
                     schemas.PerformanceEventType.location_largest_contentful_paint_time,
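
The router change lets the events autocomplete endpoint accept the GraphQL filter type and coerce it to the graphql event type before the lookup, mirroring the existing FetchFilterType._url -> EventType.request mapping. Below is a hypothetical, self-contained sketch of that coercion step; the enum members and their string values are stand-ins, not the real schemas.* definitions:

from enum import Enum


class GraphqlFilterType(Enum):
    _name = "graphqlName"    # stand-in value


class EventType(Enum):
    request = "request"      # stand-in value
    graphql = "graphql"      # stand-in value


def coerce_search_type(type_):
    # map the UI-facing filter type onto the event type used for the autocomplete lookup
    if type_ in [GraphqlFilterType._name]:
        return EventType.graphql
    return type_


print(coerce_search_type(GraphqlFilterType._name))  # EventType.graphql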