Merge remote-tracking branch 'origin/api-v1.9.5' into dev

Taha Yassine Kraiem 2023-01-24 10:36:58 +01:00
commit 68eefe35f8
26 changed files with 108 additions and 948 deletions


@@ -51,7 +51,7 @@ def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, u
main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY {data.sort} {data.order}
ORDER BY {data.sort} {data.order.value}
LIMIT 1;""", full_args)
# print("--------------------")
# print(main_query)
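
Note on the recurring "{data.order}" to "{data.order.value}" changes in this commit: interpolating the enum member itself into the SQL f-string is version-dependent in Python. A minimal sketch of the failure mode, assuming SortOrderType is a str-backed Enum with values "ASC"/"DESC" (a hypothetical mirror; the real definition lives in schemas.py):

from enum import Enum

class SortOrderType(str, Enum):  # hypothetical mirror of schemas.SortOrderType
    asc = "ASC"
    desc = "DESC"

order = SortOrderType.desc
# Python <= 3.10 formats str-mixin enums through str.__format__, yielding "DESC";
# Python 3.11 changed Enum.__format__ to match __str__, yielding
# "SortOrderType.desc", which silently corrupts the generated SQL.
print(f"ORDER BY created_at {order}")        # version-dependent output
print(f"ORDER BY created_at {order.value}")  # always "ORDER BY created_at DESC"

Interpolating ".value" is the portable spelling, and because the value set is fixed by the enum, the interpolated fragment stays constrained.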


@@ -396,7 +396,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order}
ORDER BY created_at {data.order.value}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()


@@ -1,15 +1,9 @@
import json
from typing import List
import schemas
from chalicelib.core import significance, sessions
from chalicelib.utils import helper, pg_client
from chalicelib.core import significance
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
@@ -24,10 +18,6 @@ def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
@@ -45,212 +35,6 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
return events
def __transform_old_funnels(events):
for e in events:
if not isinstance(e.get("value"), list):
e["value"] = [e["value"]]
return events
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""",
{"user_id": user_id, "project_id": project_id, "name": name,
"filter": json.dumps(filter.dict()),
"is_public": is_public})
cur.execute(
query
)
r = cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
return {"data": r}
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
s_query = []
if filter is not None:
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
s_query.append("filter = %(filter)s::jsonb")
if name is not None and len(name) > 0:
s_query.append("name = %(name)s")
if is_public is not None:
s_query.append("is_public = %(is_public)s")
if len(s_query) == 0:
return {"errors": ["Nothing to update"]}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
UPDATE public.funnels
SET {" , ".join(s_query)}
WHERE funnel_id=%(funnel_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
"project_id": project_id})
# print("--------------------")
# print(query)
# print("--------------------")
cur.execute(
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.funnels
WHERE project_id = %(project_id)s
AND funnels.deleted_at IS NULL
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
project_id=project_id, user_id=None, count_only=True)
row["sessionsCount"] = counts["countSessions"]
row["usersCount"] = counts["countUsers"]
filter_clone = dict(row["filter"])
overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
row["stages"] = overview["stages"]
row.pop("filter")
row["stagesCount"] = len(row["stages"])
# TODO: ask david to count it alone
row["criticalIssuesCount"] = overview["criticalIssuesCount"]
row["missedConversions"] = 0 if len(row["stages"]) < 2 \
else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
row["filter"] = helper.old_search_payload_to_flat(filter_clone)
return rows
def get_possible_issue_types(project_id):
return [{"type": t, "title": helper.get_issue_title(t)} for t in
['click_rage', 'dead_click', 'excessive_scrolling',
'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
'js_error']]
def get_start_end_time(filter_d, range_value, start_date, end_date):
if start_date is not None and end_date is not None:
filter_d["startDate"], filter_d["endDate"] = start_date, end_date
elif range_value is not None and len(range_value) > 0:
filter_d["rangeValue"] = range_value
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
else:
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
def delete(project_id, funnel_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.funnels
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
)
return {"data": {"state": "success"}}
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
project_id=project_id,
user_id=user_id)
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
start_date=data.startDate, end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
return sessions.search_sessions(data=data, project_id=project_id,
user_id=user_id)
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
@@ -271,35 +55,6 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return {"data": {
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
}}
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
@@ -311,62 +66,3 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""\
SELECT
*
FROM public.funnels
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
)
)
f = helper.dict_to_camel_case(cur.fetchone())
if f is None:
return None
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date
data.endDate = data.endDate if data.endDate is not None else end_date
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
for i in issues:
if i.get("issueId", "") == issue_id:
issue = i
break
return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
data=data) if issue is not None else {"total": 0, "sessions": []},
# "stages": helper.list_to_camel_case(insights),
# "totalDropDueToIssues": total_drop_due_to_issues,
"issue": issue}


@@ -9,49 +9,49 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.github},
AND provider = 'github')) AS {schemas.IntegrationType.github.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira},
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag},
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch},
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.datadog},
AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic},
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar},
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.sentry},
AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver},
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic},
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch},
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack')) AS {schemas.IntegrationType.slack};""",
WHERE type='slack')) AS {schemas.IntegrationType.slack.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()
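
The status query above packs every integration check into a single statement: each provider becomes one boolean column computed by EXISTS, so the endpoint costs one round-trip. The ".value" change matters here too, since under Python 3.11 formatting a bare member would render as "IntegrationType.github", which is not a valid column alias. A trimmed sketch of the pattern (provider names illustrative):

query = """
SELECT EXISTS(SELECT 1
              FROM public.integrations
              WHERE project_id = %(project_id)s
                AND provider = 'datadog') AS datadog,
       EXISTS(SELECT 1
              FROM public.webhooks
              WHERE type = 'slack') AS slack;"""
# cur.execute(query, {"project_id": project_id}); cur.fetchone()
# would return e.g. {"datadog": True, "slack": False}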


@@ -151,9 +151,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.upper()
data.order = data.order.value
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
@@ -186,7 +186,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.value
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@@ -202,9 +204,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
full_args)
print("--------------------")
print(main_query)
print("--------------------")
# print("--------------------")
# print(main_query)
# print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
@@ -797,7 +799,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
@@ -835,7 +837,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
e_k += "_custom"
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:
@@ -1084,39 +1086,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
return results
def search_by_issue(user_id, issue, project_id, start_date, end_date):
constraints = ["s.project_id = %(projectId)s",
"p_issues.context_string = %(issueContextString)s",
"p_issues.type = %(issueType)s"]
if start_date is not None:
constraints.append("start_ts >= %(startDate)s")
if end_date is not None:
constraints.append("start_ts <= %(endDate)s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
FROM public.sessions AS s
INNER JOIN events_common.issues USING (session_id)
INNER JOIN public.issues AS p_issues USING (issue_id)
LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)
WHERE {" AND ".join(constraints)}
ORDER BY s.session_id DESC;""",
{
"issueContextString": issue["contextString"],
"issueType": issue["type"], "userId": user_id,
"projectId": project_id,
"startDate": start_date,
"endDate": end_date
}))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]


@@ -69,7 +69,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
query = cur.mogrify(f"""SELECT sessions_notes.*
FROM sessions_notes
WHERE {" AND ".join(conditions)}
ORDER BY created_at {data.order}
ORDER BY created_at {data.order.value}
LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})


@@ -611,137 +611,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext =
return alerts.delete(projectId, alertId)
@app.post('/{projectId}/funnels', tags=["funnels"])
def add_funnel(projectId: int, data: schemas.FunnelSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.create(project_id=projectId,
user_id=context.user_id,
name=data.name,
filter=data.filter,
is_public=data.is_public)
@app.get('/{projectId}/funnels', tags=["funnels"])
def get_funnels(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_by_user(project_id=projectId,
user_id=context.user_id,
range_value=None,
start_date=None,
end_date=None,
details=False)}
@app.get('/{projectId}/funnels/details', tags=["funnels"])
def get_funnels_with_details(projectId: int, rangeValue: str = None, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_by_user(project_id=projectId,
user_id=context.user_id,
range_value=rangeValue,
start_date=startDate,
end_date=endDate,
details=True)}
@app.get('/{projectId}/funnels/issue_types', tags=["funnels"])
def get_possible_issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_possible_issue_types(project_id=projectId)}
@app.get('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None,
endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
range_value=rangeValue, start_date=startDate, end_date=endDate)
@app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data)
@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_issues(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
range_value=rangeValue, start_date=startDate, end_date=endDate)
@app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data)}
@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None,
endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
range_value=rangeValue,
start_date=startDate,
end_date=endDate)}
@app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data)}
@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"])
def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None:
return {"errors": ["issue not found"]}
return {
"data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
start_date=startDate, end_date=endDate),
"issue": issue}}
@app.post('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"])
def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str,
data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = funnels.search_by_issue(project_id=projectId, user_id=context.user_id, issue_id=issueId,
funnel_id=funnelId, data=data)
if "errors" in data:
return data
if data.get("issue") is None:
data["issue"] = issues.get(project_id=projectId, issue_id=issueId)
return {
"data": data
}
@app.get('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = funnels.get(funnel_id=funnelId, project_id=projectId, user_id=context.user_id)
if data is None:
return {"errors": ["funnel not found"]}
return {"data": data}
@app.post('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.update(funnel_id=funnelId,
user_id=context.user_id,
name=data.name,
filter=data.filter.dict(),
is_public=data.is_public,
project_id=projectId)
@app.delete('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def delete_filter(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)):
return funnels.delete(user_id=context.user_id, funnel_id=funnelId, project_id=projectId)
@app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"])
@app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"])
def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...),


@@ -551,6 +551,8 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
@root_validator(pre=True)
def transform(cls, values):
if values.get("type") is None:
return values
values["type"] = {
"CLICK": EventType.click.value,
"INPUT": EventType.input.value,
@@ -634,6 +636,8 @@ class SessionSearchFilterSchema(__MixedSearchFilter):
@root_validator(pre=True)
def transform(cls, values):
if values.get("type") is None:
return values
values["type"] = {
"USEROS": FilterType.user_os.value,
"USERBROWSER": FilterType.user_browser.value,
@@ -783,12 +787,6 @@ class FunnelSchema(BaseModel):
alias_generator = attribute_to_camel_case
class UpdateFunnelSchema(FunnelSchema):
name: Optional[str] = Field(default=None)
filter: Optional[FunnelSearchPayloadSchema] = Field(default=None)
is_public: Optional[bool] = Field(default=None)
class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
# class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema):
sort: Optional[str] = Field(None)
@@ -847,7 +845,7 @@ class CardSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(default=None)
endDate: Optional[int] = Field(default=None)
sort: Optional[str] = Field(default=None)
order: Optional[str] = Field(default=None)
order: SortOrderType = Field(default=SortOrderType.desc)
group_by_user: Optional[bool] = Field(default=False, const=True)
@@ -1049,7 +1047,9 @@ class CreateCardSchema(CardChartSchema):
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{MetricOfTable.issues}"
elif values.get("metric_type") == MetricType.funnel:
assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}"
pass
# allow the UI to send empty series for funnel
# assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}"
# ignore this for now, let the UI send whatever it wants for metric_of
# assert isinstance(values.get("metric_of"), MetricOfTimeseries), \
# f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.funnel}"

ee/api/.gitignore

@@ -194,7 +194,7 @@ Pipfile.lock
/chalicelib/core/errors_favorite.py
#exp /chalicelib/core/events.py
/chalicelib/core/events_ios.py
#exp /chalicelib/core/funnels.py
/chalicelib/core/funnels.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py


@@ -427,7 +427,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order}
ORDER BY created_at {data.order.value}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()


@@ -1,380 +0,0 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
from chalicelib.core import significance
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
else:
from chalicelib.core import sessions
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
schemas.EventType.location, schemas.EventType.custom,
schemas.EventType.click_ios, schemas.EventType.input_ios,
schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
events = []
for e in f_events:
if e.operator is None:
e.operator = schemas.SearchEventOperator._is
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sh.isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
return events
def __transform_old_funnels(events):
for e in events:
if not isinstance(e.get("value"), list):
e["value"] = [e["value"]]
return events
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""",
{"user_id": user_id, "project_id": project_id, "name": name,
"filter": json.dumps(filter.dict()),
"is_public": is_public})
cur.execute(
query
)
r = cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
return {"data": r}
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
s_query = []
if filter is not None:
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
s_query.append("filter = %(filter)s::jsonb")
if name is not None and len(name) > 0:
s_query.append("name = %(name)s")
if is_public is not None:
s_query.append("is_public = %(is_public)s")
if len(s_query) == 0:
return {"errors": ["Nothing to update"]}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
UPDATE public.funnels
SET {" , ".join(s_query)}
WHERE funnel_id=%(funnel_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
"project_id": project_id})
# print("--------------------")
# print(query)
# print("--------------------")
cur.execute(
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.funnels
WHERE project_id = %(project_id)s
AND funnels.deleted_at IS NULL
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
project_id=project_id, user_id=None, count_only=True)
row["sessionsCount"] = counts["countSessions"]
row["usersCount"] = counts["countUsers"]
filter_clone = dict(row["filter"])
overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
row["stages"] = overview["stages"]
row.pop("filter")
row["stagesCount"] = len(row["stages"])
# TODO: ask david to count it alone
row["criticalIssuesCount"] = overview["criticalIssuesCount"]
row["missedConversions"] = 0 if len(row["stages"]) < 2 \
else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
row["filter"] = helper.old_search_payload_to_flat(filter_clone)
return rows
def get_possible_issue_types(project_id):
return [{"type": t, "title": helper.get_issue_title(t)} for t in
['click_rage', 'dead_click', 'excessive_scrolling',
'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
'js_error']]
def get_start_end_time(filter_d, range_value, start_date, end_date):
if start_date is not None and end_date is not None:
filter_d["startDate"], filter_d["endDate"] = start_date, end_date
elif range_value is not None and len(range_value) > 0:
filter_d["rangeValue"] = range_value
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
else:
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
def delete(project_id, funnel_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.funnels
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
)
return {"data": {"state": "success"}}
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
project_id=project_id,
user_id=user_id)
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
start_date=data.startDate, end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
return sessions.search_sessions(data=data, project_id=project_id,
user_id=user_id)
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
return {"stages": [], "totalDropDueToIssues": 0}
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# TODO: check if this correct
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
if len(insights) == 0:
total_drop_due_to_issues = 0
else:
total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return {"data": {
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
}}
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) < 0:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""\
SELECT
*
FROM public.funnels
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
)
)
f = helper.dict_to_camel_case(cur.fetchone())
if f is None:
return None
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date
data.endDate = data.endDate if data.endDate is not None else end_date
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
for i in issues:
if i.get("issueId", "") == issue_id:
issue = i
break
return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
data=data) if issue is not None else {"total": 0, "sessions": []},
# "stages": helper.list_to_camel_case(insights),
# "totalDropDueToIssues": total_drop_due_to_issues,
"issue": issue}


@@ -9,49 +9,49 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.github},
AND provider = 'github')) AS {schemas.IntegrationType.github.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira},
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag},
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch},
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.datadog},
AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic},
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar},
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.sentry},
AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver},
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic},
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch},
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack' AND tenant_id=%(tenant_id)s)) AS {schemas.IntegrationType.slack};""",
WHERE type='slack' AND tenant_id=%(tenant_id)s)) AS {schemas.IntegrationType.slack.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()


@@ -1,3 +1,5 @@
from decouple import config
import schemas
from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper
@@ -15,6 +17,8 @@ def reset(data: schemas.ForgetPasswordPayloadSchema):
# ---FOR SSO
if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False:
return {"errors": ["Please use your SSO to login"]}
if config("enforce_SSO", cast=bool, default=False) and not a_user["superAdmin"]:
return {"errors": ["Please use your SSO to login, enforced by admin"]}
# ----------
invitation_link = users.generate_new_invitation(user_id=a_user["id"])
email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link)
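
The enforce_SSO gate added above reads a deployment flag through python-decouple, which pulls the value from the environment (or a settings file) and casts it to bool; defaulting to False leaves existing deployments unchanged. A minimal sketch of the gate as a standalone helper (hypothetical function name; the superAdmin exemption mirrors the check in the diff):

from decouple import config

def password_reset_allowed(a_user: dict) -> bool:
    # super admins keep the password path even when SSO is enforced,
    # matching the `not a_user["superAdmin"]` condition above
    if config("enforce_SSO", cast=bool, default=False) and not a_user["superAdmin"]:
        return False
    return True

The same flag guards users.authenticate later in this commit, so both login and password recovery honor it.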


@@ -153,9 +153,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.upper()
data.order = data.order.value
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
@@ -188,7 +188,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.value
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@@ -204,9 +206,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
print("--------------------")
print(main_query)
print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
@@ -799,7 +801,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
@@ -837,7 +839,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
e_k += "_custom"
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:


@@ -237,9 +237,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.upper()
data.order = data.order.value
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
@@ -266,7 +266,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.desc.value
else:
data.order = data.order.value
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@@ -961,7 +963,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
@@ -984,7 +986,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
@@ -1036,7 +1038,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
# _multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
# events_conditions[-2]["time"] = f"(?t{event.sourceOperator} %({e_k})s)"
events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator}%({e_k})s", event.source,
events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator.value}%({e_k})s", event.source,
value_key=e_k)
event_index += 1
# TODO: no isNot for RequestDetails


@@ -20,12 +20,14 @@ def _table_where(table, index, value):
def _sum_table_index(table, index):
print(f'index {index}')
s = 0
count = 0
for row in table:
v = row[index]
if v is None:
continue
print(v)
s += v
count += 1
return s
@@ -213,21 +215,31 @@ def query_most_errors_by_period(project_id, start_time, end_time,
table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh',
name_index='names')
del res
print(table_hh1)
print('\n')
print(table_hh2)
print('\n')
new_errors = [x for x in this_period_errors if x not in last_period_errors]
common_errors = [x for x in this_period_errors if x not in new_errors]
sessions_idx = columns.index('sessions')
names_idx = columns.index('names')
print(_table_where(table_hh1, names_idx, this_period_errors[0]))
percentage_errors = dict()
total = _sum_table_index(table_hh1, sessions_idx)
# error_increase = dict()
new_error_values = dict()
error_values = dict()
for n in this_period_errors:
if n is None:
continue
percentage_errors[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), names_idx)
new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
for n in common_errors:
if n is None:
continue
old_errors = _sum_table_index(_table_where(table_hh2, names_idx, n), names_idx)
if old_errors == 0:
continue


@@ -179,10 +179,10 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema):
COALESCE(JSONB_AGG(full_traces ORDER BY rn)
FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions
FROM (SELECT traces.*,users.email,users.name AS username,
ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn
ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order.value}) AS rn
FROM traces LEFT JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
ORDER BY traces.created_at {data.order}) AS full_traces;""", params)
ORDER BY traces.created_at {data.order.value}) AS full_traces;""", params)
)
rows = cur.fetchone()
return helper.dict_to_camel_case(rows)
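
The trail query pages inside the database: ROW_NUMBER() numbers every matching trace, and JSONB_AGG(...) FILTER keeps only rows inside the requested window, so a single query returns the page as ready-made JSON. A reduced sketch of the shape (search conditions and the users join trimmed; p_start/p_end derive from the pagination schema):

page, limit = 1, 10
params = {"p_start": (page - 1) * limit, "p_end": page * limit}
query = """
SELECT COALESCE(JSONB_AGG(full_traces ORDER BY rn)
                FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s),
                '[]'::JSONB) AS sessions
FROM (SELECT traces.*,
             ROW_NUMBER() OVER (ORDER BY traces.created_at DESC) AS rn
      FROM traces) AS full_traces;"""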


@@ -741,6 +741,9 @@ def authenticate(email, password, for_change_password=False):
if for_change_password:
return True
r = helper.dict_to_camel_case(r)
if config("enforce_SSO", cast=bool, default=False) and not r["superAdmin"]:
return {"errors": ["must sign-in with SSO, enforced by admin"]}
jwt_iat = change_jwt_iat(r['userId'])
iat = TimeUTC.datetime_to_timestamp(jwt_iat)
return {


@@ -14,7 +14,7 @@ rm -rf ./chalicelib/core/errors_favorite.py
#exp rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
rm -rf ./chalicelib/core/dashboards.py
#exp rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/integration_base.py
rm -rf ./chalicelib/core/integration_base_issue.py
rm -rf ./chalicelib/core/integration_github.py


@@ -307,7 +307,7 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())}
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],


@@ -79,7 +79,15 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
user_id: Optional[int] = Field(default=None)
query: Optional[str] = Field(default=None)
action: Optional[str] = Field(default=None)
order: Literal["asc", "desc"] = Field(default="desc")
order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc)
@root_validator(pre=True)
def transform_order(cls, values):
if values.get("order") is None:
values["order"] = schemas.SortOrderType.desc
else:
values["order"] = values["order"].upper()
return values
class Config:
alias_generator = schemas.attribute_to_camel_case
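
Both the Field default and the pre-validator above are needed: Field(default=...) only applies when "order" is missing entirely, while an explicit "order": null in the payload reaches validation as None, and lower-case input like "desc" must be upper-cased before enum coercion (assuming SortOrderType values are "ASC"/"DESC"). A sketch with a trimmed stand-in schema:

from enum import Enum
from pydantic import BaseModel, Field, root_validator

class SortOrderType(str, Enum):  # hypothetical mirror of schemas.SortOrderType
    asc = "ASC"
    desc = "DESC"

class TrailSearch(BaseModel):  # trimmed stand-in for TrailSearchPayloadSchema
    order: SortOrderType = Field(default=SortOrderType.desc)

    @root_validator(pre=True)
    def transform_order(cls, values):
        if values.get("order") is None:
            values["order"] = SortOrderType.desc
        else:
            values["order"] = values["order"].upper()
        return values

print(TrailSearch.parse_obj({"order": "asc"}).order)  # SortOrderType.asc
print(TrailSearch.parse_obj({"order": None}).order)   # falls back to desc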


@@ -325,6 +325,11 @@ $$
LANGUAGE plpgsql;
DROP FUNCTION get_new_filter_key;
DROP FUNCTION get_new_event_filter_key;
DROP FUNCTION get_new_event_key;
DROP TABLE IF EXISTS public.funnels;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);
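
The explicit COMMIT before this index is deliberate: PostgreSQL rejects CREATE INDEX CONCURRENTLY inside a transaction block, so the migration has to close its transaction first. The Python equivalent is an autocommit connection; a sketch with psycopg2 (placeholder DSN):

import psycopg2

conn = psycopg2.connect("dbname=openreplay")  # placeholder connection string
conn.autocommit = True  # CONCURRENTLY cannot run inside BEGIN ... COMMIT
with conn.cursor() as cur:
    cur.execute("CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx "
                "ON events.clicks (selector);")
conn.close()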


@@ -109,7 +109,6 @@ $$
('dashboards'),
('dashboard_widgets'),
('errors'),
('funnels'),
('integrations'),
('issues'),
('jira_cloud'),
@@ -336,21 +335,6 @@ $$
);
CREATE TABLE IF NOT EXISTS funnels
(
funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
CREATE INDEX IF NOT EXISTS funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id);
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'announcement_type') THEN


@@ -148,6 +148,7 @@ chalice:
# idp_sls_url: ''
# idp_name: ''
# idp_tenantKey: ''
# enforce_SSO: 'false'
# Below is an example on how to override values
# chartname:


@@ -300,6 +300,7 @@ $$
$$
LANGUAGE plpgsql;
DROP TABLE IF EXISTS public.funnels;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);


@@ -249,22 +249,6 @@ $$
);
CREATE TABLE funnels
(
funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
CREATE TYPE announcement_type AS ENUM ('notification', 'alert');
CREATE TABLE announcements