feat(api): custom metrics rename title to name
feat(api): saved search
feat(api): refactored funnels
feat(api): removed funnels EE
feat(DB): saved search table
feat(DB): custom metrics rename title to name
feat(DB): funnels new index
parent 74694eb5b4
commit ad6caaa225
11 changed files with 266 additions and 314 deletions
@@ -37,12 +37,12 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
data.series = None
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
query = cur.mogrify(f"""\
- WITH m AS (INSERT INTO metrics (project_id, user_id, title)
- VALUES (%(project_id)s, %(user_id)s, %(title)s)
+ WITH m AS (INSERT INTO metrics (project_id, user_id, name)
+ VALUES (%(project_id)s, %(user_id)s, %(name)s)
RETURNING *)
INSERT
- INTO metric_series(metric_id, index, title, filter)
- VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(title_{i})s, %(filter_{i})s::jsonb)"
+ INTO metric_series(metric_id, index, name, filter)
+ VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;""", params)

@@ -69,13 +69,14 @@ def __get_series_id(metric_id):
return [r["series_id"] for r in rows]


- def update(metric_id, user_id, data: schemas.UpdateCustomMetricsSchema):
+ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
series_ids = __get_series_id(metric_id)
n_series = []
d_series_ids = []
u_series = []
u_series_ids = []
- params = {"metric_id": metric_id, "is_public": data.is_public, "title": data.title}
+ params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
+ "user_id": user_id, "project_id": project_id}
for i, s in enumerate(data.series):
prefix = "u_"
if s.series_id is None:
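The create() hunk above keeps the metric and its series in a single round trip: a CTE inserts the metrics row, and every series row references it through (SELECT metric_id FROM m). A standalone sketch of how the indexed placeholders (%(index_0)s, %(name_0)s, ...) can be assembled; the helper name and the sample values are illustrative, not the project's code:

# Sketch only: how the per-series placeholders could be produced so one query
# creates the metric and all of its series, mirroring the diff above.
import json

def build_create_query(project_id, user_id, name, series):
    params = {"project_id": project_id, "user_id": user_id, "name": name}
    for i, s in enumerate(series):
        params[f"index_{i}"] = i
        params[f"name_{i}"] = s["name"]
        params[f"filter_{i}"] = json.dumps(s["filter"])
    values = ",".join(f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
                      for i in range(len(series)))
    query = f"""\
        WITH m AS (INSERT INTO metrics (project_id, user_id, name)
                   VALUES (%(project_id)s, %(user_id)s, %(name)s)
                   RETURNING *)
        INSERT INTO metric_series(metric_id, index, name, filter)
        VALUES {values}
        RETURNING metric_id;"""
    return query, params

query, params = build_create_query(1, 7, "Errors per day",
                                    [{"name": "JS errors", "filter": {"events": []}}])
print(query)
print(params)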
@@ -98,18 +99,18 @@ def update(metric_id, user_id, data: schemas.UpdateCustomMetricsSchema):
sub_queries = []
if len(n_series) > 0:
sub_queries.append(f"""\
- n AS (INSERT INTO metric_series (metric_id, index, title, filter)
- VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_title_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
+ n AS (INSERT INTO metric_series (metric_id, index, name, filter)
+ VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
for s in n_series])}
RETURNING 1)""")
if len(u_series) > 0:
sub_queries.append(f"""\
u AS (UPDATE metric_series
- SET title=series.title,
+ SET name=series.name,
filter=series.filter,
index=series.filter.index
- FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_title_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
- for s in n_series])}) AS series(series_id, index, title, filter)
+ FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
+ for s in n_series])}) AS series(series_id, index, name, filter)
WHERE metric_id =%(metric_id)s AND series_id=series.series_id
RETURNING 1)""")
if len(d_series_ids) > 0:

@@ -119,7 +120,10 @@ def update(metric_id, user_id, data: schemas.UpdateCustomMetricsSchema):
query = cur.mogrify(f"""\
{"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
UPDATE metrics
- SET title = %(title)s, is_public= %(is_public)s WHERE metric_id = %(metric_id)s
+ SET name = %(name)s, is_public= %(is_public)s
+ WHERE metric_id = %(metric_id)s
+ AND project_id = %(project_id)s
+ AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(
query

@@ -161,7 +165,8 @@ def delete(project_id, metric_id, user_id):
UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
- AND metric_id = %(metric_id)s;""",
+ AND metric_id = %(metric_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
)

@@ -188,5 +193,5 @@ def get(metric_id, project_id, user_id):
)
)
row = cur.fetchone()
- row["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+ row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
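The update() hunks split the incoming series into new and existing entries and prefix their placeholders (n_ / u_) so a single statement chain can insert and update in the same WITH. A rough, self-contained sketch of that parameter flattening, using made-up sample data:

# Sketch only: the n_/u_ prefix convention from update() above, applied to sample data.
# Series without a series_id are treated as new inserts, the rest as in-place updates.
import json

def split_series(series, params):
    new_series, updated_series = [], []
    for i, s in enumerate(series):
        prefix = "u_" if s.get("series_id") is not None else "n_"
        params[f"{prefix}index_{i}"] = i
        params[f"{prefix}name_{i}"] = s["name"]
        params[f"{prefix}filter_{i}"] = json.dumps(s["filter"])
        if prefix == "n_":
            new_series.append({"i": i})
        else:
            params[f"u_series_id_{i}"] = s["series_id"]
            updated_series.append({"i": i})
    return new_series, updated_series

params = {"metric_id": 3, "name": "Renamed metric", "is_public": False}
n_series, u_series = split_series(
    [{"name": "added series", "filter": {"events": []}},
     {"series_id": 11, "name": "existing series", "filter": {"events": []}}],
    params)
print(n_series, u_series)
print(params)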
@@ -40,7 +40,7 @@ def create(project_id, user_id, name, filter, is_public):
return {"data": r}


- def update(funnel_id, user_id, name=None, filter=None, is_public=None):
+ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
s_query = []
if filter is not None:
helper.delete_keys_from_dict(filter, REMOVE_KEYS)

@@ -56,9 +56,10 @@ def update(funnel_id, user_id, name=None, filter=None, is_public=None):
UPDATE public.funnels
SET {" , ".join(s_query)}
WHERE funnel_id=%(funnel_id)s
- RETURNING *;""",
- {"user_id": user_id, "funnel_id": funnel_id, "name": name,
- "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public})
+ AND project_id = %(project_id)s
+ AND (user_id = %(user_id)s OR is_public)
+ RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
+ "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public})
# print("--------------------")
# print(query)
# print("--------------------")

@@ -74,13 +75,12 @@ def update(funnel_id, user_id, name=None, filter=None, is_public=None):

def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
with pg_client.PostgresClient() as cur:
- team_query = ""
cur.execute(
cur.mogrify(
f"""\
- SELECT DISTINCT ON (funnels.funnel_id) funnel_id,project_id, user_id, name, created_at, deleted_at, is_public
+ SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
- FROM public.funnels {team_query}
+ FROM public.funnels
WHERE project_id = %(project_id)s
AND funnels.deleted_at IS NULL
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
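The recurring change in this file is the ownership guard: every mutating statement is now scoped to the caller's project and restricted to rows the caller owns or that are flagged public. Shown below as a standalone statement plus parameters; the values are placeholders:

# Sketch only: the guard added to funnels.update(). A row is updatable when it belongs
# to the requesting user or is public, and always only within the caller's project.
GUARDED_UPDATE = """\
    UPDATE public.funnels
    SET name = %(name)s
    WHERE funnel_id = %(funnel_id)s
      AND project_id = %(project_id)s
      AND (user_id = %(user_id)s OR is_public)
    RETURNING *;"""

params = {"funnel_id": 5, "project_id": 1, "user_id": 7, "name": "Checkout drop-off"}
print(GUARDED_UPDATE)
print(params)  # in the real code these are bound with cur.mogrify / cur.execute

Read this way, a public funnel stays editable and deletable by any user of the project, which matches the delete() and get() hunks that follow.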
@@ -135,7 +135,8 @@ def delete(project_id, funnel_id, user_id):
UPDATE public.funnels
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
- AND funnel_id = %(funnel_id)s;""",
+ AND funnel_id = %(funnel_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
)

@@ -220,7 +221,7 @@ def get_issues_on_the_fly(funnel_id, project_id, data):
last_stage=last_stage))}


- def get(funnel_id, project_id):
+ def get(funnel_id, project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(

@@ -230,8 +231,9 @@ def get(funnel_id, project_id):
FROM public.funnels
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
- AND funnel_id = %(funnel_id)s;""",
- {"funnel_id": funnel_id, "project_id": project_id}
+ AND funnel_id = %(funnel_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
+ {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
)
)

@@ -247,7 +249,7 @@ def get(funnel_id, project_id):
@dev.timed
def search_by_issue(user_id, project_id, funnel_id, issue_id, data, range_value=None, start_date=None, end_date=None):
if len(data.get("events", [])) == 0:
- f = get(funnel_id=funnel_id, project_id=project_id)
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.get('startDate', start_date),
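With user_id added to get(), a funnel that exists but is neither owned by the caller nor public now comes back as None, and callers turn that into an error payload. A minimal sketch of that caller contract, with a stand-in for funnels.get so it runs on its own:

# Sketch only: the caller-side contract after get() gained user_id.
def get_funnel_response(funnels_get, funnel_id, project_id, user_id):
    data = funnels_get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
    if data is None:
        return {"errors": ["funnel not found"]}
    return {"data": data}

def fake_get(funnel_id, project_id, user_id):
    owner_id = 7  # pretend this funnel is private to user 7
    return {"funnelId": funnel_id, "name": "Checkout"} if user_id == owner_id else None

print(get_funnel_response(fake_get, 5, 1, 7))   # {'data': {...}}
print(get_funnel_response(fake_get, 5, 1, 8))   # {'errors': ['funnel not found']}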
115  api/chalicelib/core/saved_search.py  (new file)

@@ -0,0 +1,115 @@
import json

import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC


def create(project_id, user_id, data: schemas.SavedSearchSchema):
    with pg_client.PostgresClient() as cur:
        data = data.dict()
        data["filter"] = json.dumps(data["filter"])
        query = cur.mogrify("""\
                INSERT INTO public.searches (project_id, user_id, name, filter,is_public)
                VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
                RETURNING *;""", {"user_id": user_id, "project_id": project_id, **data})
        cur.execute(
            query
        )
        r = cur.fetchone()
        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
        r = helper.dict_to_camel_case(r)
        return {"data": r}


def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
    with pg_client.PostgresClient() as cur:
        data = data.dict()
        data["filter"] = json.dumps(data["filter"])
        query = cur.mogrify(f"""\
                UPDATE public.searches
                SET name = %(name)s,
                    filter = %(filter)s,
                    is_public = %(is_public)s
                WHERE search_id=%(search_id)s
                  AND project_id= %(project_id)s
                  AND (user_id = %(user_id)s OR is_public)
                RETURNING *;""", {"search_id": search_id, "project_id": project_id, "user_id": user_id, **data})
        cur.execute(
            query
        )
        r = cur.fetchone()
        r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
        r = helper.dict_to_camel_case(r)
        # r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
        return r


def get_all(project_id, user_id, details=False):
    with pg_client.PostgresClient() as cur:
        print(cur.mogrify(
            f"""\
                SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
                {",filter" if details else ""}
                FROM public.searches
                WHERE project_id = %(project_id)s
                  AND deleted_at IS NULL
                  AND (user_id = %(user_id)s OR is_public);""",
            {"project_id": project_id, "user_id": user_id}
        ))
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
                {",filter" if details else ""}
                FROM public.searches
                WHERE project_id = %(project_id)s
                  AND deleted_at IS NULL
                  AND (user_id = %(user_id)s OR is_public);""",
                {"project_id": project_id, "user_id": user_id}
            )
        )

        rows = cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        for row in rows:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
        return rows


def delete(project_id, search_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.searches
                SET deleted_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND search_id = %(search_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                        {"search_id": search_id, "project_id": project_id, "user_id": user_id})
        )

    return {"state": "success"}


def get(search_id, project_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT
                    *
                FROM public.searches
                WHERE project_id = %(project_id)s
                  AND deleted_at IS NULL
                  AND search_id = %(search_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                {"search_id": search_id, "project_id": project_id, "user_id": user_id}
            )
        )

        f = helper.dict_to_camel_case(cur.fetchone())
        if f is None:
            return None

        f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
        return f
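saved_search.create() serializes the filter to jsonb and converts the created_at timestamp on the way out. A standalone sketch of the parameters it ends up binding into the INSERT, rebuilt without the project's pg_client so it runs on its own; the payload fields follow SavedSearchSchema as far as this diff shows, and the values are invented:

# Sketch only: what saved_search.create() binds into the searches INSERT.
import json

payload = {"name": "Rage clicks last week",
           "filter": {"events": [], "rangeValue": "LAST_7_DAYS"},
           "is_public": False}

data = dict(payload)
data["filter"] = json.dumps(data["filter"])
params = {"user_id": 7, "project_id": 1, **data}
query = """\
    INSERT INTO public.searches (project_id, user_id, name, filter, is_public)
    VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb, %(is_public)s)
    RETURNING *;"""
print(query)
print(params)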
@@ -11,7 +11,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assign
log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, slack, users, \
- custom_metrics
+ custom_metrics, saved_search
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper
from chalicelib.utils.TimeUTC import TimeUTC

@@ -752,7 +752,7 @@ def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str,

@app.get('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)):
- data = funnels.get(funnel_id=funnelId, project_id=projectId)
+ data = funnels.get(funnel_id=funnelId, project_id=projectId, user_id=context.user_id)
if data is None:
return {"errors": ["funnel not found"]}
return {"data": data}

@@ -766,7 +766,8 @@ def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema
user_id=context.user_id,
name=data.name,
filter=data.filter.dict(),
- is_public=data.is_public)
+ is_public=data.is_public,
+ project_id=projectId)


@app.delete('/{projectId}/funnels/{funnelId}', tags=["funnels"])

@@ -1117,9 +1118,39 @@ def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentCo
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
                         context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": custom_metrics.update(user_id=context.user_id, metric_id=metric_id, data=data)}
+ return {
+     "data": custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)}


@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}


+ @app.post('/{projectId}/saved_search', tags=["savedSearch"])
+ @app.put('/{projectId}/saved_search', tags=["savedSearch"])
+ def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...),
+                      context: schemas.CurrentContext = Depends(OR_context)):
+     return saved_search.create(project_id=projectId, user_id=context.user_id, data=data)
+
+
+ @app.get('/{projectId}/saved_search', tags=["savedSearch"])
+ def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+     return {"data": saved_search.get_all(project_id=projectId, user_id=context.user_id)}
+
+
+ @app.get('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
+ def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
+     return {"data": saved_search.get(project_id=projectId, search_id=search_id, user_id=context.user_id)}
+
+
+ @app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
+ @app.put('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
+ def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...),
+                         context: schemas.CurrentContext = Depends(OR_context)):
+     return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)}
+
+
+ @app.delete('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
+ def delete_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
+     return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)}
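The new savedSearch routes mirror the funnels ones. A sketch of exercising them over HTTP with the requests library; the base URL, the auth header, and the camelCase keys in the response are assumptions, not documented behaviour:

# Sketch only: calling the savedSearch endpoints added above.
import requests

BASE = "http://localhost:8000"                  # placeholder host
HEADERS = {"Authorization": "Bearer <token>"}   # placeholder auth
project_id = 1

body = {"name": "Rage clicks last week",
        "filter": {"events": [], "rangeValue": "LAST_7_DAYS"},
        "is_public": False}

created = requests.post(f"{BASE}/{project_id}/saved_search", json=body, headers=HEADERS).json()
listed = requests.get(f"{BASE}/{project_id}/saved_search", headers=HEADERS).json()
search_id = listed["data"][0]["searchId"]
one = requests.get(f"{BASE}/{project_id}/saved_search/{search_id}", headers=HEADERS).json()
requests.delete(f"{BASE}/{project_id}/saved_search/{search_id}", headers=HEADERS)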
@@ -592,7 +592,7 @@ class CustomMetricCreateSeriesSchema(BaseModel):


class CreateCustomMetricsSchema(BaseModel):
- title: str = Field(...)
+ name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
is_public: Optional[bool] = Field(False)


@@ -618,3 +618,7 @@ class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):

class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
+
+
+ class SavedSearchSchema(FunnelSchema):
+     pass
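SavedSearchSchema simply reuses FunnelSchema, and CreateCustomMetricsSchema now expects name instead of title. An example request body implied by the renamed schema; the exact fields of CustomMetricCreateSeriesSchema are not shown in this diff, so anything beyond name and filter in the series entries is an assumption:

# Sketch only: a payload shape for the renamed CreateCustomMetricsSchema.
import json

create_custom_metric_body = {
    "name": "Errors per day",          # was "title" before this commit
    "is_public": False,
    "series": [
        {"name": "JS errors", "filter": {"events": []}},
    ],
}
print(json.dumps(create_custom_metric_body, indent=2))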
1  ee/api/.gitignore  (vendored)

@@ -186,6 +186,7 @@ Pipfile
/chalicelib/core/errors_favorite_viewed.py
/chalicelib/core/events.py
/chalicelib/core/events_ios.py
+ /chalicelib/core/funnels.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py
|
@ -1,275 +0,0 @@
|
|||
import chalicelib.utils.helper
|
||||
from chalicelib.core import events, significance, sessions
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
from chalicelib.utils import helper, pg_client
|
||||
from chalicelib.utils import dev
|
||||
import json
|
||||
|
||||
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
|
||||
|
||||
ALLOW_UPDATE_FOR = ["name", "filter"]
|
||||
|
||||
|
||||
def filter_stages(stages):
|
||||
ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
|
||||
events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
|
||||
events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
|
||||
events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
|
||||
return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
|
||||
|
||||
|
||||
def create(project_id, user_id, name, filter, is_public):
|
||||
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
|
||||
filter["events"] = filter_stages(stages=filter.get("events", []))
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify("""\
|
||||
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
|
||||
RETURNING *;""",
|
||||
{"user_id": user_id, "project_id": project_id, "name": name, "filter": json.dumps(filter),
|
||||
"is_public": is_public})
|
||||
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
r = cur.fetchone()
|
||||
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
r = helper.dict_to_camel_case(r)
|
||||
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
|
||||
return {"data": r}
|
||||
|
||||
|
||||
def update(funnel_id, user_id, name=None, filter=None, is_public=None):
|
||||
s_query = []
|
||||
if filter is not None:
|
||||
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
|
||||
s_query.append("filter = %(filter)s::jsonb")
|
||||
if name is not None and len(name) > 0:
|
||||
s_query.append("name = %(name)s")
|
||||
if is_public is not None:
|
||||
s_query.append("is_public = %(is_public)s")
|
||||
if len(s_query) == 0:
|
||||
return {"errors": ["Nothing to update"]}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""\
|
||||
UPDATE public.funnels
|
||||
SET {" , ".join(s_query)}
|
||||
WHERE funnel_id=%(funnel_id)s
|
||||
RETURNING *;""",
|
||||
{"user_id": user_id, "funnel_id": funnel_id, "name": name,
|
||||
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public})
|
||||
# print("--------------------")
|
||||
# print(query)
|
||||
# print("--------------------")
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
r = cur.fetchone()
|
||||
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
r = helper.dict_to_camel_case(r)
|
||||
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
|
||||
return {"data": r}
|
||||
|
||||
|
||||
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
team_query = """INNER JOIN
|
||||
(
|
||||
SELECT collaborators.user_id
|
||||
FROM public.users AS creator
|
||||
INNER JOIN public.users AS collaborators USING (tenant_id)
|
||||
WHERE creator.user_id=%(user_id)s
|
||||
) AS team USING (user_id)"""
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""\
|
||||
SELECT DISTINCT ON (funnels.funnel_id) funnel_id,project_id, user_id, name, created_at, deleted_at, is_public
|
||||
{",filter" if details else ""}
|
||||
FROM public.funnels {team_query}
|
||||
WHERE project_id = %(project_id)s
|
||||
AND funnels.deleted_at IS NULL
|
||||
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
if details:
|
||||
row["filter"]["events"] = filter_stages(row["filter"]["events"])
|
||||
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
|
||||
end_date=end_date)
|
||||
counts = sessions.search2_pg(data=row["filter"], project_id=project_id, user_id=None, count_only=True)
|
||||
row["sessionsCount"] = counts["countSessions"]
|
||||
row["usersCount"] = counts["countUsers"]
|
||||
overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
|
||||
row["stages"] = overview["stages"]
|
||||
row.pop("filter")
|
||||
row["stagesCount"] = len(row["stages"])
|
||||
# TODO: ask david to count it alone
|
||||
row["criticalIssuesCount"] = overview["criticalIssuesCount"]
|
||||
row["missedConversions"] = 0 if len(row["stages"]) < 2 \
|
||||
else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
|
||||
return rows
|
||||
|
||||
|
||||
def get_possible_issue_types(project_id):
|
||||
return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
|
||||
['click_rage', 'dead_click', 'excessive_scrolling',
|
||||
'bad_request', 'missing_resource', 'memory', 'cpu',
|
||||
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
|
||||
'js_error']]
|
||||
|
||||
|
||||
def get_start_end_time(filter_d, range_value, start_date, end_date):
|
||||
if start_date is not None and end_date is not None:
|
||||
filter_d["startDate"], filter_d["endDate"] = start_date, end_date
|
||||
elif range_value is not None and len(range_value) > 0:
|
||||
filter_d["rangeValue"] = range_value
|
||||
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
|
||||
else:
|
||||
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
|
||||
|
||||
|
||||
def delete(project_id, funnel_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.funnels
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND funnel_id = %(funnel_id)s;""",
|
||||
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
|
||||
)
|
||||
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
return sessions.search2_pg(data=f["filter"], project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data):
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
return sessions.search2_pg(data=data, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_top_insights(project_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"data": {"stages": helper.list_to_camel_case(insights),
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}}
|
||||
|
||||
|
||||
def get_top_insights_on_the_fly(funnel_id, project_id, data):
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
|
||||
if len(insights) > 0:
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"data": {"stages": helper.list_to_camel_case(insights),
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}}
|
||||
|
||||
|
||||
def get_issues(project_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
return {"data": {
|
||||
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
|
||||
}}
|
||||
|
||||
|
||||
@dev.timed
|
||||
def get_issues_on_the_fly(funnel_id, project_id, data):
|
||||
first_stage = data.get("firstStage")
|
||||
last_stage = data.get("lastStage")
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
return {
|
||||
"issues": helper.dict_to_camel_case(
|
||||
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
|
||||
last_stage=last_stage))}
|
||||
|
||||
|
||||
def get(funnel_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""\
|
||||
SELECT
|
||||
*
|
||||
FROM public.funnels
|
||||
WHERE project_id = %(project_id)s
|
||||
AND deleted_at IS NULL
|
||||
AND funnel_id = %(funnel_id)s;""",
|
||||
{"funnel_id": funnel_id, "project_id": project_id}
|
||||
)
|
||||
)
|
||||
|
||||
f = helper.dict_to_camel_case(cur.fetchone())
|
||||
if f is None:
|
||||
return None
|
||||
|
||||
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
|
||||
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
|
||||
return f
|
||||
|
||||
|
||||
@dev.timed
|
||||
def search_by_issue(user_id, project_id, funnel_id, issue_id, data, range_value=None, start_date=None, end_date=None):
|
||||
if len(data.get("events", [])) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.get('startDate', start_date),
|
||||
end_date=data.get('endDate', end_date))
|
||||
data = f["filter"]
|
||||
|
||||
# insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
|
||||
issues = get_issues_on_the_fly(funnel_id=funnel_id, project_id=project_id, data=data).get("issues", {})
|
||||
issues = issues.get("significant", []) + issues.get("insignificant", [])
|
||||
issue = None
|
||||
for i in issues:
|
||||
if i.get("issueId", "") == issue_id:
|
||||
issue = i
|
||||
break
|
||||
return {"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, issue=issue,
|
||||
data=data) if issue is not None else {"total": 0, "sessions": []},
|
||||
# "stages": helper.list_to_camel_case(insights),
|
||||
# "totalDropDueToIssues": total_drop_due_to_issues,
|
||||
"issue": issue}
|
||||
|
|
@ -85,7 +85,7 @@ CREATE TABLE metrics
|
|||
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
|
||||
title text NOT NULL,
|
||||
name text NOT NULL,
|
||||
is_public boolean NOT NULL DEFAULT FALSE,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp
|
||||
|
|
@ -96,11 +96,28 @@ CREATE TABLE metric_series
|
|||
series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
|
||||
index integer NOT NULL,
|
||||
title text NULL,
|
||||
name text NULL,
|
||||
filter jsonb NOT NULL,
|
||||
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
|
||||
deleted_at timestamp
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id);
|
||||
CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id);
|
||||
|
||||
|
||||
CREATE TABLE searches
|
||||
(
|
||||
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
|
||||
name text not null,
|
||||
filter jsonb not null,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp,
|
||||
is_public boolean NOT NULL DEFAULT False
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
|
||||
CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
|
||||
|
||||
COMMIT;
|
||||
|
|
@ -382,6 +382,7 @@ $$
|
|||
);
|
||||
|
||||
CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
|
||||
CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
|
||||
|
||||
-- --- announcements.sql ---
|
||||
|
||||
|
|
@ -979,7 +980,7 @@ $$
|
|||
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
|
||||
title text NOT NULL,
|
||||
name text NOT NULL,
|
||||
is_public boolean NOT NULL DEFAULT FALSE,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp
|
||||
|
|
@ -990,13 +991,28 @@ $$
|
|||
series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
|
||||
index integer NOT NULL,
|
||||
title text NULL,
|
||||
name text NULL,
|
||||
filter jsonb NOT NULL,
|
||||
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
|
||||
deleted_at timestamp
|
||||
);
|
||||
CREATE INDEX metric_series_metric_id_idx ON public.metric_series (metric_id);
|
||||
|
||||
CREATE TABLE searches
|
||||
(
|
||||
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
|
||||
name text not null,
|
||||
filter jsonb not null,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp,
|
||||
is_public boolean NOT NULL DEFAULT False
|
||||
);
|
||||
|
||||
CREATE INDEX searches_user_id_is_public_idx ON public.searches (user_id, is_public);
|
||||
CREATE INDEX searches_project_id_idx ON public.searches (project_id);
|
||||
|
||||
raise notice 'DB created';
|
||||
END IF;
|
||||
END;
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ CREATE TABLE metrics
|
|||
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
|
||||
title text NOT NULL,
|
||||
name text NOT NULL,
|
||||
is_public boolean NOT NULL DEFAULT FALSE,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp
|
||||
|
|
@ -47,11 +47,29 @@ CREATE TABLE metric_series
|
|||
series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
|
||||
index integer NOT NULL,
|
||||
title text NULL,
|
||||
name text NULL,
|
||||
filter jsonb NOT NULL,
|
||||
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
|
||||
deleted_at timestamp
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id);
|
||||
|
||||
|
||||
CREATE TABLE searches
|
||||
(
|
||||
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
|
||||
name text not null,
|
||||
filter jsonb not null,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp,
|
||||
is_public boolean NOT NULL DEFAULT False
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
|
||||
CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
|
||||
|
||||
COMMIT;
|
||||
|
|
@ -349,6 +349,7 @@ $$
|
|||
);
|
||||
|
||||
CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
|
||||
CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
|
||||
|
||||
-- --- announcements.sql ---
|
||||
|
||||
|
|
@ -924,7 +925,7 @@ $$
|
|||
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
|
||||
title text NOT NULL,
|
||||
name text NOT NULL,
|
||||
is_public boolean NOT NULL DEFAULT FALSE,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp
|
||||
|
|
@ -935,13 +936,30 @@ $$
|
|||
series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
|
||||
index integer NOT NULL,
|
||||
title text NULL,
|
||||
name text NULL,
|
||||
filter jsonb NOT NULL,
|
||||
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
|
||||
deleted_at timestamp
|
||||
);
|
||||
CREATE INDEX metric_series_metric_id_idx ON public.metric_series (metric_id);
|
||||
|
||||
|
||||
CREATE TABLE searches
|
||||
(
|
||||
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
|
||||
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
|
||||
name text not null,
|
||||
filter jsonb not null,
|
||||
created_at timestamp default timezone('utc'::text, now()) not null,
|
||||
deleted_at timestamp,
|
||||
is_public boolean NOT NULL DEFAULT False
|
||||
);
|
||||
|
||||
CREATE INDEX searches_user_id_is_public_idx ON public.searches (user_id, is_public);
|
||||
CREATE INDEX searches_project_id_idx ON public.searches (project_id);
|
||||
|
||||
|
||||
raise notice 'DB created';
|
||||
END IF;
|
||||
END;
|
||||
|
|
|
|||