feat(chalice): funnels cleaned
feat(chalice): funnels refactored
feat(chalice): funnels merged
parent d10453f58e
commit a332606580
6 changed files with 4 additions and 509 deletions
@@ -1,15 +1,9 @@
import json
from typing import List

import schemas
from chalicelib.core import significance, sessions
from chalicelib.utils import helper, pg_client
from chalicelib.core import significance
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC

REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]

ALLOW_UPDATE_FOR = ["name", "filter"]


def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
@@ -24,10 +18,6 @@ def __parse_events(f_events: List[dict]):
    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]


def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
    return [e.dict() for e in f_events]


def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
    if f_events is None:
        return
@@ -45,95 +35,6 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
    return events


def __transform_old_funnels(events):
    for e in events:
        if not isinstance(e.get("value"), list):
            e["value"] = [e["value"]]
    return events


def get_possible_issue_types(project_id):
    return [{"type": t, "title": helper.get_issue_title(t)} for t in
            ['click_rage', 'dead_click', 'excessive_scrolling',
             'bad_request', 'missing_resource', 'memory', 'cpu',
             'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
             'js_error']]


def get_start_end_time(filter_d, range_value, start_date, end_date):
    if start_date is not None and end_date is not None:
        filter_d["startDate"], filter_d["endDate"] = start_date, end_date
    elif range_value is not None and len(range_value) > 0:
        filter_d["rangeValue"] = range_value
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
    else:
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
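For orientation, a brief hedged illustration of the precedence in get_start_end_time above: explicit start/end timestamps win, otherwise a supplied range value, otherwise the rangeValue already stored in the filter. The values below are placeholders, not taken from this commit.

# Hypothetical call (placeholder epoch-millisecond timestamps and range literal):
f = {"rangeValue": "LAST_24_HOURS"}
get_start_end_time(filter_d=f, range_value=None,
                   start_date=1700000000000, end_date=1700003600000)
# Explicit dates take priority, so the filter now carries them:
# f["startDate"] == 1700000000000 and f["endDate"] == 1700003600000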


def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
                                    project_id=project_id,
                                    user_id=user_id)


def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
                           start_date=data.startDate, end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
    return sessions.search_sessions(data=data, project_id=project_id,
                                    user_id=user_id)


def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: this fix for huge drop count
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            total_drop_due_to_issues = insights[0]["sessionsCount"]
        # end fix
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"data": {"stages": insights,
                     "totalDropDueToIssues": total_drop_due_to_issues}}


def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
    data.events = filter_stages(__parse_events(data.events))
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
    data.events = __fix_stages(data.events)
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: this fix for huge drop count
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            total_drop_due_to_issues = insights[0]["sessionsCount"]
        # end fix
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"data": {"stages": insights,
                     "totalDropDueToIssues": total_drop_due_to_issues}}


# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
    data.events = filter_stages(__parse_events(data.events))
@@ -154,35 +55,6 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
            "totalDropDueToIssues": total_drop_due_to_issues}


def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return {"data": {
        "issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
    }}


def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
    if len(data.events) < 2:
        return {"issues": []}
    return {
        "issues": helper.dict_to_camel_case(
            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
                                         last_stage=len(data.events)))}


# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
    data.events = filter_stages(data.events)
@@ -194,35 +66,3 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem
        "issues": helper.dict_to_camel_case(
            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
                                         last_stage=len(data.events)))}


def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT
                    *
                FROM public.funnels
                WHERE project_id = %(project_id)s
                  AND deleted_at IS NULL
                  AND funnel_id = %(funnel_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
            )
        )

        f = helper.dict_to_camel_case(cur.fetchone())
        if f is None:
            return None
        if f.get("filter") is not None and f["filter"].get("events") is not None:
            f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
        f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
        f["filter"]["events"] = __parse_events(f["filter"]["events"])
        f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
        if fix_stages:
            f["filter"]["events"] = __fix_stages(f["filter"]["events"])
        f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
        if flatten:
            f["filter"] = helper.old_search_payload_to_flat(f["filter"])
        return f
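For readers skimming the surviving module, a minimal usage sketch of the stored-funnel flow (the ids 1, 7 and 42 are placeholders, not values from this commit; it assumes a matching funnel row exists and that pg_client is configured):

# Hypothetical call into the module shown above:
result = get_top_insights(project_id=1, user_id=7, funnel_id=42)
if "errors" not in result:
    stages = result["data"]["stages"]              # camelCased per-stage insight rows
    drop = result["data"]["totalDropDueToIssues"]  # capped at the first stage's sessionsCount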
@@ -1086,39 +1086,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
    return results


def search_by_issue(user_id, issue, project_id, start_date, end_date):
    constraints = ["s.project_id = %(projectId)s",
                   "p_issues.context_string = %(issueContextString)s",
                   "p_issues.type = %(issueType)s"]
    if start_date is not None:
        constraints.append("start_ts >= %(startDate)s")
    if end_date is not None:
        constraints.append("start_ts <= %(endDate)s")
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
                    FROM public.sessions AS s
                         INNER JOIN events_common.issues USING (session_id)
                         INNER JOIN public.issues AS p_issues USING (issue_id)
                         LEFT JOIN (SELECT user_id, session_id
                                    FROM public.user_favorite_sessions
                                    WHERE user_id = %(userId)s) AS favorite_sessions
                                   USING (session_id)
                    WHERE {" AND ".join(constraints)}
                    ORDER BY s.session_id DESC;""",
                {
                    "issueContextString": issue["contextString"],
                    "issueType": issue["type"], "userId": user_id,
                    "projectId": project_id,
                    "startDate": start_date,
                    "endDate": end_date
                }))

        rows = cur.fetchall()
        return helper.list_to_camel_case(rows)


def get_user_sessions(project_id, user_id, start_date, end_date):
    with pg_client.PostgresClient() as cur:
        constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@@ -611,67 +611,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext =
    return alerts.delete(projectId, alertId)


@app.get('/{projectId}/funnels/issue_types', tags=["funnels"])
def get_possible_issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_possible_issue_types(project_id=projectId)}


@app.get('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None,
                        endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.get_top_insights(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                                    range_value=rangeValue, start_date=startDate, end_date=endDate)


@app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                                               data=data)


@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDate: int = None, endDate: int = None,
                      context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.get_issues(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                              range_value=rangeValue, start_date=startDate, end_date=endDate)


@app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
                                 context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                                                  data=data)}


@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None,
                        endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                                         range_value=rangeValue,
                                         start_date=startDate,
                                         end_date=endDate)}


@app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
                                                    data=data)}


@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"])
def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
                              context: schemas.CurrentContext = Depends(OR_context)):
    issue = issues.get(project_id=projectId, issue_id=issueId)
    if issue is None:
        return {"errors": ["issue not found"]}
    return {
        "data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
                                                      start_date=startDate, end_date=endDate),
                 "issue": issue}}


@app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"])
@app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"])
def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...),
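As a client-side companion to the funnel routes in the hunk above, a hedged sketch of calling the GET insights endpoint. The base URL, the auth header and the LAST_7_DAYS range literal are assumptions for illustration; the path shape and query parameter come from the route decorators shown.

# Hypothetical request (projectId=1, funnelId=42 are placeholders):
import requests

resp = requests.get("https://openreplay.example.com/1/funnels/42/insights",
                    params={"rangeValue": "LAST_7_DAYS"},
                    headers={"Authorization": "Bearer <token>"})
stages = resp.json()["data"]["stages"]  # same shape as funnels.get_top_insights returns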
ee/api/.gitignore (vendored, 2 lines changed)
@@ -194,7 +194,7 @@ Pipfile.lock
/chalicelib/core/errors_favorite.py
#exp /chalicelib/core/events.py
/chalicelib/core/events_ios.py
#exp /chalicelib/core/funnels.py
/chalicelib/core/funnels.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py
@@ -1,251 +0,0 @@
import json
from typing import List

import chalicelib.utils.helper
import schemas
from chalicelib.core import significance
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC

from decouple import config

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from chalicelib.core import sessions_legacy as sessions
else:
    from chalicelib.core import sessions

REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]

ALLOW_UPDATE_FOR = ["name", "filter"]


def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
    ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
                   schemas.EventType.location, schemas.EventType.custom,
                   schemas.EventType.click_ios, schemas.EventType.input_ios,
                   schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
    return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]


def __parse_events(f_events: List[dict]):
    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]


def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
    return [e.dict() for e in f_events]


def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
    if f_events is None:
        return
    events = []
    for e in f_events:
        if e.operator is None:
            e.operator = schemas.SearchEventOperator._is

        if not isinstance(e.value, list):
            e.value = [e.value]
        is_any = sh.isAny_opreator(e.operator)
        if not is_any and isinstance(e.value, list) and len(e.value) == 0:
            continue
        events.append(e)
    return events


def __transform_old_funnels(events):
    for e in events:
        if not isinstance(e.get("value"), list):
            e["value"] = [e["value"]]
    return events


def get_possible_issue_types(project_id):
    return [{"type": t, "title": helper.get_issue_title(t)} for t in
            ['click_rage', 'dead_click', 'excessive_scrolling',
             'bad_request', 'missing_resource', 'memory', 'cpu',
             'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
             'js_error']]


def get_start_end_time(filter_d, range_value, start_date, end_date):
    if start_date is not None and end_date is not None:
        filter_d["startDate"], filter_d["endDate"] = start_date, end_date
    elif range_value is not None and len(range_value) > 0:
        filter_d["rangeValue"] = range_value
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
    else:
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])


def delete(project_id, funnel_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.funnels
                SET deleted_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND funnel_id = %(funnel_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                        {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
        )

    return {"data": {"state": "success"}}


def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
                                    project_id=project_id,
                                    user_id=user_id)


def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
                           start_date=data.startDate, end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
    return sessions.search_sessions(data=data, project_id=project_id,
                                    user_id=user_id)


def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: this fix for huge drop count
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            total_drop_due_to_issues = insights[0]["sessionsCount"]
        # end fix
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"data": {"stages": insights,
                     "totalDropDueToIssues": total_drop_due_to_issues}}


def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
    data.events = filter_stages(__parse_events(data.events))
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
    data.events = __fix_stages(data.events)
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # fix: this fix for huge drop count
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            total_drop_due_to_issues = insights[0]["sessionsCount"]
        # end fix
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"data": {"stages": insights,
                     "totalDropDueToIssues": total_drop_due_to_issues}}


# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
    data.events = filter_stages(__parse_events(data.events))
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        return {"stages": [], "totalDropDueToIssues": 0}
    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
    insights = helper.list_to_camel_case(insights)
    if len(insights) > 0:
        # TODO: check if this correct
        if total_drop_due_to_issues > insights[0]["sessionsCount"]:
            if len(insights) == 0:
                total_drop_due_to_issues = 0
            else:
                total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
        insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
    return {"stages": insights,
            "totalDropDueToIssues": total_drop_due_to_issues}


def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
    if f is None:
        return {"errors": ["funnel not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return {"data": {
        "issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
    }}


def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) == 0:
        f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
        if f is None:
            return {"errors": ["funnel not found"]}
        get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
                           start_date=data.startDate,
                           end_date=data.endDate)
        data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
    if len(data.events) < 2:
        return {"issues": []}
    return {
        "issues": helper.dict_to_camel_case(
            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
                                         last_stage=len(data.events)))}


# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
    data.events = filter_stages(data.events)
    data.events = __fix_stages(data.events)
    if len(data.events) < 0:
        return {"issues": []}

    return {
        "issues": helper.dict_to_camel_case(
            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
                                         last_stage=len(data.events)))}


def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT
                    *
                FROM public.funnels
                WHERE project_id = %(project_id)s
                  AND deleted_at IS NULL
                  AND funnel_id = %(funnel_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
            )
        )

        f = helper.dict_to_camel_case(cur.fetchone())
        if f is None:
            return None
        if f.get("filter") is not None and f["filter"].get("events") is not None:
            f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
        f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
        f["filter"]["events"] = __parse_events(f["filter"]["events"])
        f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
        if fix_stages:
            f["filter"]["events"] = __fix_stages(f["filter"]["events"])
        f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
        if flatten:
            f["filter"] = helper.old_search_payload_to_flat(f["filter"])
        return f
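A small hedged sketch of the soft-delete behaviour the removed module implemented (ids are placeholders; assumes pg_client is configured and the funnel exists): delete() only stamps deleted_at, while get() filters on "deleted_at IS NULL", so a deleted funnel simply stops being readable.

# Hypothetical sequence against the removed module:
delete(project_id=1, funnel_id=42, user_id=7)               # -> {"data": {"state": "success"}}
assert get(funnel_id=42, project_id=1, user_id=7) is None   # row still exists, but is filtered out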
@@ -14,7 +14,7 @@ rm -rf ./chalicelib/core/errors_favorite.py
#exp rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
rm -rf ./chalicelib/core/dashboards.py
#exp rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/integration_base.py
rm -rf ./chalicelib/core/integration_base_issue.py
rm -rf ./chalicelib/core/integration_github.py