Api v1.15.0 (#1516)
* feat(chalice): upgraded dependencies
* feat(chalice): changed path analysis schema
* feat(DB): click coordinate support
* feat(chalice): changed path analysis issues schema
* feat(chalice): upgraded dependencies
* fix(chalice): fixed pydantic issue
* refactor(chalice): refresh token validator
* feat(chalice): role restrictions
* feat(chalice): EE path analysis changes
* refactor(DB): changed creation queries
* refactor(DB): changed delete queries
* feat(DB): support new path analysis payload
* feat(chalice): save path analysis card
* feat(chalice): restrict access
* feat(chalice): restrict access
* feat(chalice): EE save new path analysis card
* refactor(chalice): path analysis
* feat(chalice): path analysis new query
* fix(chalice): configurable CH config
* fix(chalice): assist autocomplete
* refactor(chalice): refactored permissions
* refactor(chalice): changed log level
* refactor(chalice): upgraded dependencies
* refactor(chalice): changed path analysis query
* refactor(chalice): changed path analysis query
* refactor(chalice): upgraded dependencies
* refactor(alerts): upgraded dependencies
* refactor(crons): upgraded dependencies
* feat(chalice): path analysis ignore start point
* feat(chalice): path analysis in progress
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis new query
* refactor(chalice): authorizers
* refactor(chalice): refactored authorizer
* fix(chalice): fixed create card of PathAnalysis
* refactor(chalice): compute link-percentage for Path Analysis
* refactor(chalice): remove null starting point from Path Analysis
* feat(chalice): path analysis CH query
* refactor(chalice): changed Path Analysis links-value
* fix(chalice): fixed search notes for EE
* feat(chalice): path analysis enhanced query results
* feat(chalice): include timezone in search sessions response
* refactor(chalice): refactored logs
* feat(chalice): get path analysis issues
parent 494a84348a
commit babaa34815
18 changed files with 273 additions and 313 deletions
@@ -1,3 +1,4 @@
import logging
from typing import Optional

from fastapi import Request
@@ -8,6 +9,8 @@ from starlette.exceptions import HTTPException
from chalicelib.core import authorizers
from schemas import CurrentAPIContext

logger = logging.getLogger(__name__)


class APIKeyAuth(APIKeyHeader):
    def __init__(self, auto_error: bool = True):
@@ -22,7 +25,7 @@ class APIKeyAuth(APIKeyHeader):
                detail="Invalid API Key",
            )
        r["authorizer_identity"] = "api_key"
        print(r)
        logger.debug(r)
        request.state.authorizer_identity = "api_key"
        request.state.currentContext = CurrentAPIContext(tenant_id=r["tenantId"])
        return request.state.currentContext
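The same pattern recurs in almost every file of this change set: a module-level logger replaces ad-hoc print calls, so output goes through the logging module and its level is controlled by the LOGLEVEL configuration instead of being hard-coded. A minimal sketch of that pattern, assuming python-decouple for configuration as used in this codebase (the authorize helper and its payload are illustrative, not from the diff):

import logging

from decouple import config

# Level comes from the environment (LOGLEVEL), falling back to INFO,
# mirroring the basicConfig calls introduced elsewhere in this commit.
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logger = logging.getLogger(__name__)


def authorize(token: str) -> dict:
    # Hypothetical helper: log the resolved context at DEBUG instead of
    # printing it, so production logs stay quiet by default.
    payload = {"tenantId": 1, "authorizer_identity": "api_key"}
    logger.debug(payload)  # was: print(payload)
    return payload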
@@ -1,3 +1,5 @@
import logging

from fastapi import Request
from starlette import status
from starlette.exceptions import HTTPException
@@ -6,6 +8,8 @@ import schemas
from chalicelib.core import projects
from or_dependencies import OR_context

logger = logging.getLogger(__name__)


class ProjectAuthorizer:
    def __init__(self, project_identifier):
@@ -21,6 +25,6 @@ class ProjectAuthorizer:
                 or projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None)) \
                or (self.project_identifier == "projectKey" \
                    and projects.get_internal_project_id(project_key=value) is None):
            print("project not found")
            print(value)
            logger.debug("project not found")
            logger.debug(value)
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="project not found.")
@@ -1,5 +1,6 @@
import json
from typing import Union, List
import logging
from typing import Union

from decouple import config
from fastapi import HTTPException, status
@@ -10,6 +11,7 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import StorageClient

logger = logging.getLogger(__name__)
PIE_CHART_GROUP = 5


@@ -75,7 +77,7 @@ def __get_errors_list(project_id, user_id, data: schemas.CardSchema):

def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
    if len(data.series) == 0:
        print("empty series")
        logger.debug("empty series")
        return {
            "total": 0,
            "sessions": []
@@ -312,8 +314,11 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
        card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
                                                                                     operator=schemas.SearchEventOperator._not_on,
                                                                                     value=s.value))
    result = __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
    return result[0] if len(result) > 0 else {}
    # result = __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
    result = sessions.search_table_of_individual_issues(project_id=project_id,
                                                        metric_value=card_table.metric_value,
                                                        data=card_table)
    return result


def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
@@ -336,7 +341,6 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
    r = {"start_point": [s.model_dump() for s in data.start_point],
         "start_type": data.start_type,
         "exclude": [e.model_dump() for e in data.excludes]}
    print(r)
    return r
|
|||
|
|
@ -8,38 +8,12 @@ from chalicelib.utils import pg_client
|
|||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from time import time
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __transform_journey2(rows, reverse_path=False):
|
||||
# nodes should contain duplicates for different steps otherwise the UI crashes
|
||||
nodes = []
|
||||
nodes_values = []
|
||||
links = []
|
||||
for r in rows:
|
||||
source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}"
|
||||
if source not in nodes:
|
||||
nodes.append(source)
|
||||
nodes_values.append({"name": r['e_value'], "eventType": r['event_type']})
|
||||
if r['next_value']:
|
||||
target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}"
|
||||
if target not in nodes:
|
||||
nodes.append(target)
|
||||
nodes_values.append({"name": r['next_value'], "eventType": r['next_type']})
|
||||
link = {"eventType": r['event_type'], "value": r["sessions_count"],
|
||||
"avgTimeToTarget": r["avg_time_to_target"]}
|
||||
if not reverse_path:
|
||||
link["source"] = nodes.index(source)
|
||||
link["target"] = nodes.index(target)
|
||||
else:
|
||||
link["source"] = nodes.index(target)
|
||||
link["target"] = nodes.index(source)
|
||||
links.append(link)
|
||||
|
||||
return {"nodes": nodes_values,
|
||||
"links": sorted(links, key=lambda x: (x["source"], x["target"]), reverse=False)}
|
||||
|
||||
|
||||
def __transform_journey3(rows, reverse_path=False):
|
||||
def __transform_journey(rows, reverse_path=False):
|
||||
total_100p = 0
|
||||
number_of_step1 = 0
|
||||
for r in rows:
|
||||
|
|
@ -48,10 +22,10 @@ def __transform_journey3(rows, reverse_path=False):
|
|||
number_of_step1 += 1
|
||||
total_100p += r["sessions_count"]
|
||||
for i in range(number_of_step1):
|
||||
rows[i]["value"] = round(number=100 / number_of_step1, ndigits=2)
|
||||
rows[i]["value"] = 100 / number_of_step1
|
||||
|
||||
for i in range(number_of_step1, len(rows)):
|
||||
rows[i]["value"] = round(number=rows[i]["sessions_count"] * 100 / total_100p, ndigits=2)
|
||||
rows[i]["value"] = rows[i]["sessions_count"] * 100 / total_100p
|
||||
|
||||
nodes = []
|
||||
nodes_values = []
|
||||
|
|
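The renamed __transform_journey normalizes link values as percentages rather than raw session counts: the first-step rows split 100% evenly between them, and every deeper row is weighted by its sessions_count relative to the step-1 total; the earlier round(..., ndigits=2) calls are dropped so the raw ratio is kept. A small self-contained sketch of that computation on made-up rows (the row shape is simplified, and the way step-1 rows are detected below is an assumption, since the hunk starts mid-loop):

def compute_link_values(rows):
    # Assumption: rows are ordered by step, so the leading rows with
    # event_number_in_session == 1 form the first step of the journey.
    total_100p = 0
    number_of_step1 = 0
    for r in rows:
        if r["event_number_in_session"] > 1:
            break
        number_of_step1 += 1
        total_100p += r["sessions_count"]

    # Step-1 links share 100% equally.
    for i in range(number_of_step1):
        rows[i]["value"] = 100 / number_of_step1

    # Deeper links are weighted against the step-1 session total.
    for i in range(number_of_step1, len(rows)):
        rows[i]["value"] = rows[i]["sessions_count"] * 100 / total_100p
    return rows


rows = [
    {"event_number_in_session": 1, "sessions_count": 60},
    {"event_number_in_session": 1, "sessions_count": 40},
    {"event_number_in_session": 2, "sessions_count": 25},
]
print(compute_link_values(rows))  # step-1 rows -> 50.0 each, step-2 row -> 25.0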
@ -88,9 +62,12 @@ JOURNEY_TYPES = {
|
|||
}
|
||||
|
||||
|
||||
# query: Q3, the result is correct,
|
||||
# query: Q4, the result is correct,
|
||||
# startPoints are computed before ranked_events to reduce the number of window functions over rows
|
||||
# replaced time_to_target by time_from_previous
|
||||
# compute avg_time_from_previous at the same level as sessions_count
|
||||
# sort by top 5 according to sessions_count at the CTE level
|
||||
# final part project data without grouping
|
||||
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
||||
sub_events = []
|
||||
start_points_from = "pre_ranked_events"
|
||||
|
|
@ -324,26 +301,26 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
start_points_conditions.append("event_number_in_session = 1")
|
||||
|
||||
steps_query = ["""n1 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
time_from_previous,
|
||||
count(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
INNER JOIN start_points USING (session_id)
|
||||
WHERE event_number_in_session = 1 AND next_value IS NOT NULL
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, time_from_previous)"""]
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
AVG(time_from_previous) AS avg_time_from_previous,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events INNER JOIN start_points USING (session_id)
|
||||
WHERE event_number_in_session = 1
|
||||
AND next_value IS NOT NULL
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT %(eventThresholdNumberInGroup)s)"""]
|
||||
projection_query = ["""(SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count,
|
||||
avg(time_from_previous) AS avg_time_from_previous
|
||||
FROM n1
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
|
||||
ORDER BY event_number_in_session, event_type, e_value, next_type, next_value)"""]
|
||||
avg_time_from_previous
|
||||
FROM n1)"""]
|
||||
for i in range(2, data.density):
|
||||
steps_query.append(f"""n{i} AS (SELECT *
|
||||
FROM (SELECT re.event_number_in_session,
|
||||
|
|
@ -351,13 +328,12 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
re.e_value,
|
||||
re.next_type,
|
||||
re.next_value,
|
||||
re.time_from_previous,
|
||||
count(1) AS sessions_count
|
||||
AVG(re.time_from_previous) AS avg_time_from_previous,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events AS re
|
||||
INNER JOIN n{i - 1} ON (n{i - 1}.next_value = re.e_value)
|
||||
WHERE re.event_number_in_session = {i}
|
||||
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value,
|
||||
re.time_from_previous) AS sub_level
|
||||
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value) AS sub_level
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT %(eventThresholdNumberInGroup)s)""")
|
||||
projection_query.append(f"""(SELECT event_number_in_session,
|
||||
|
|
@ -366,10 +342,8 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
next_type,
|
||||
next_value,
|
||||
sessions_count,
|
||||
avg(time_from_previous) AS avg_time_from_previous
|
||||
FROM n{i}
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
|
||||
ORDER BY event_number_in_session, event_type, e_value, next_type, next_value)""")
|
||||
avg_time_from_previous
|
||||
FROM n{i})""")
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = f"""\
|
||||
|
|
@ -394,26 +368,25 @@ WITH sub_sessions AS (SELECT session_id
|
|||
LEAD(event_type, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_type,
|
||||
abs(LAG(timestamp, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) -
|
||||
timestamp) AS time_from_previous
|
||||
FROM pre_ranked_events
|
||||
INNER JOIN start_points USING (session_id)),
|
||||
FROM pre_ranked_events INNER JOIN start_points USING (session_id)),
|
||||
{",".join(steps_query)}
|
||||
{"UNION ALL".join(projection_query)};"""
|
||||
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
|
||||
"endTimestamp": data.endTimestamp, "density": data.density,
|
||||
"eventThresholdNumberInGroup": 6 if data.hide_excess else 8,
|
||||
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
|
||||
**extra_values}
|
||||
query = cur.mogrify(pg_query, params)
|
||||
_now = time()
|
||||
|
||||
cur.execute(query)
|
||||
if time() - _now > 2:
|
||||
print(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
|
||||
print("----------------------")
|
||||
print(query)
|
||||
print("----------------------")
|
||||
if True or time() - _now > 2:
|
||||
logger.info(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
|
||||
logger.info("----------------------")
|
||||
logger.info(query)
|
||||
logger.info("----------------------")
|
||||
rows = cur.fetchall()
|
||||
|
||||
return __transform_journey3(rows=rows, reverse_path=reverse)
|
||||
return __transform_journey(rows=rows, reverse_path=reverse)
|
||||
|
||||
#
|
||||
# def __compute_weekly_percentage(rows):
|
||||
|
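The rewritten PostgreSQL query builds one CTE per path step (n1, n2, ...): each step joins back on the previous step's next_value, aggregates AVG(time_from_previous) and COUNT(1) per link, and keeps only the most frequent links through ORDER BY sessions_count DESC LIMIT %(eventThresholdNumberInGroup)s before the per-step projections are combined with UNION ALL. A rough sketch of how such a chain can be assembled, assuming the table and column names from the diff (the helper itself and the simplified sub-select structure are illustrative):

def build_steps_query(density: int) -> str:
    # n1 seeds the chain from the ranked start-point events.
    steps = ["""n1 AS (SELECT event_number_in_session, event_type, e_value,
                              next_type, next_value,
                              AVG(time_from_previous) AS avg_time_from_previous,
                              COUNT(1) AS sessions_count
                       FROM ranked_events INNER JOIN start_points USING (session_id)
                       WHERE event_number_in_session = 1 AND next_value IS NOT NULL
                       GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
                       ORDER BY sessions_count DESC
                       LIMIT %(eventThresholdNumberInGroup)s)"""]
    # Each following step only keeps links reachable from the previous step,
    # again capped at the configured number of links per group.
    for i in range(2, density):
        steps.append(f"""n{i} AS (SELECT re.event_number_in_session, re.event_type, re.e_value,
                                         re.next_type, re.next_value,
                                         AVG(re.time_from_previous) AS avg_time_from_previous,
                                         COUNT(1) AS sessions_count
                                  FROM ranked_events AS re
                                       INNER JOIN n{i - 1} ON (n{i - 1}.next_value = re.e_value)
                                  WHERE re.event_number_in_session = {i}
                                  GROUP BY re.event_number_in_session, re.event_type, re.e_value,
                                           re.next_type, re.next_value
                                  ORDER BY sessions_count DESC
                                  LIMIT %(eventThresholdNumberInGroup)s)""")
    return ",\n".join(steps)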
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import logging
|
||||
from typing import List
|
||||
|
||||
import schemas
|
||||
|
|
@ -5,6 +6,8 @@ from chalicelib.core import events, metadata, projects, performance_event, sessi
|
|||
from chalicelib.utils import pg_client, helper, metrics_helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
SESSION_PROJECTION_COLS = """s.project_id,
|
||||
s.session_id::text AS session_id,
|
||||
s.user_uuid,
|
||||
|
|
@ -24,6 +27,7 @@ s.errors_count,
|
|||
s.user_anonymous_id,
|
||||
s.platform,
|
||||
s.issue_score,
|
||||
s.timezone,
|
||||
to_jsonb(s.issue_types) AS issue_types,
|
||||
favorite_sessions.session_id NOTNULL AS favorite,
|
||||
COALESCE((SELECT TRUE
|
||||
|
|
@ -120,17 +124,17 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
|
|||
ORDER BY s.session_id desc) AS filtred_sessions
|
||||
ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
|
||||
full_args)
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
try:
|
||||
cur.execute(main_query)
|
||||
except Exception as err:
|
||||
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
|
||||
print(main_query.decode('UTF-8'))
|
||||
print("--------- PAYLOAD -----------")
|
||||
print(data.model_dump_json())
|
||||
print("--------------------")
|
||||
logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(main_query.decode('UTF-8'))
|
||||
logging.warning("--------- PAYLOAD -----------")
|
||||
logging.warning(data.model_dump_json())
|
||||
logging.warning("--------------------")
|
||||
raise err
|
||||
if errors_only or ids_only:
|
||||
return helper.list_to_camel_case(cur.fetchall())
|
||||
|
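Query failures in the session search are now reported through the logging module at WARNING level instead of print, keeping the mogrified SQL and the request payload in the log before the exception is re-raised. A condensed sketch of that wrapper, assuming the project's pg_client helper as used above (the query text here is a placeholder):

import logging

import schemas
from chalicelib.utils import pg_client


def run_search(data: schemas.SessionsSearchPayloadSchema):
    with pg_client.PostgresClient() as cur:
        main_query = cur.mogrify("SELECT 1 AS placeholder;", {})
        logging.debug(main_query)
        try:
            cur.execute(main_query)
        except Exception as err:
            # Log everything needed to reproduce the failure, then re-raise.
            logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
            logging.warning(main_query.decode('UTF-8'))
            logging.warning("--------- PAYLOAD -----------")
            logging.warning(data.model_dump_json())
            raise err
        return cur.fetchall()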
|
@ -196,24 +200,23 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count
|
||||
{query_part};""", full_args)
|
||||
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
try:
|
||||
cur.execute(main_query)
|
||||
except Exception as err:
|
||||
print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
|
||||
print(main_query.decode('UTF-8'))
|
||||
print("--------- PAYLOAD -----------")
|
||||
print(data.model_dump_json())
|
||||
print("--------------------")
|
||||
logging.warning("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
|
||||
logging.warning(main_query.decode('UTF-8'))
|
||||
logging.warning("--------- PAYLOAD -----------")
|
||||
logging.warning(data.model_dump_json())
|
||||
logging.warning("--------------------")
|
||||
raise err
|
||||
if view_type == schemas.MetricTimeseriesViewType.line_chart:
|
||||
sessions = cur.fetchall()
|
||||
else:
|
||||
sessions = cur.fetchone()["count"]
|
||||
elif metric_type == schemas.MetricType.table:
|
||||
print(">>>>>>>>>>>>>TABLE")
|
||||
if isinstance(metric_of, schemas.MetricOfTable):
|
||||
main_col = "user_id"
|
||||
extra_col = ""
|
||||
|
|
@ -259,9 +262,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
GROUP BY {main_col}
|
||||
ORDER BY session_count DESC) AS users_sessions;""",
|
||||
full_args)
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
cur.execute(main_query)
|
||||
sessions = helper.dict_to_camel_case(cur.fetchone())
|
||||
for s in sessions["values"]:
|
||||
|
|
@ -330,14 +333,54 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
GROUP BY {main_col}
|
||||
ORDER BY session_count DESC) AS users_sessions;""",
|
||||
full_args)
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
cur.execute(main_query)
|
||||
sessions = helper.dict_to_camel_case(cur.fetchone())
|
||||
for s in sessions["values"]:
|
||||
s.pop("rn")
|
||||
|
||||
return sessions
|
||||
|
||||
|
||||
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int,
|
||||
metric_value: List):
|
||||
if len(metric_value) > 0:
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
|
||||
operator=schemas.SearchEventOperator._is))
|
||||
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
|
||||
favorite_only=False, issue=None, project_id=project_id,
|
||||
user_id=None)
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
full_args["issues_limit"] = data.limit
|
||||
full_args["issues_limit_s"] = (data.page - 1) * data.limit
|
||||
full_args["issues_limit_e"] = data.page * data.limit
|
||||
main_query = cur.mogrify(f"""SELECT COUNT(1) AS count,
|
||||
COALESCE(SUM(session_count), 0) AS total_sessions,
|
||||
COALESCE(JSONB_AGG(ranked_issues)
|
||||
FILTER ( WHERE rn>= %(issues_limit_s)s
|
||||
AND rn <= %(issues_limit_e)s ), '[]'::JSONB) AS values
|
||||
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY session_count DESC) AS rn
|
||||
FROM (SELECT type AS name, context_string AS value, COUNT(DISTINCT session_id) AS session_count
|
||||
FROM (SELECT session_id
|
||||
{query_part}) AS filtered_sessions
|
||||
INNER JOIN events_common.issues USING (session_id)
|
||||
INNER JOIN public.issues USING (issue_id)
|
||||
WHERE project_id = %(project_id)s
|
||||
AND timestamp >= %(startDate)s
|
||||
AND timestamp <= %(endDate)s
|
||||
GROUP BY type, context_string
|
||||
ORDER BY session_count DESC) AS filtered_issues
|
||||
) AS ranked_issues;""", full_args)
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
cur.execute(main_query)
|
||||
sessions = helper.dict_to_camel_case(cur.fetchone())
|
||||
for s in sessions["values"]:
|
||||
s.pop("rn")
|
||||
# sessions["values"] = helper.list_to_camel_case(sessions["values"])
|
||||
|
||||
return sessions
|
||||
|
||||
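The new search_table_of_individual_issues counts and pages issues in a single round trip: issues of the matching sessions are grouped by (type, context_string), ranked with ROW_NUMBER() OVER (ORDER BY session_count DESC), and only the ranks inside the requested page are aggregated through JSONB_AGG ... FILTER, while COUNT(1) still sees every ranked row. A small sketch of the page-window arithmetic that the %(issues_limit_s)s / %(issues_limit_e)s parameters encode (illustrative helper, not part of the diff):

def issues_page_window(page: int, limit: int) -> tuple:
    # Mirrors full_args["issues_limit_s"] / full_args["issues_limit_e"] above.
    start = (page - 1) * limit
    end = page * limit
    return start, end


print(issues_page_window(page=1, limit=10))  # (0, 10)
print(issues_page_window(page=2, limit=10))  # (10, 20)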
|
|
@ -842,7 +885,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
value_key=e_k_f))
|
||||
apply = True
|
||||
else:
|
||||
print(f"undefined FETCH filter: {f.type}")
|
||||
logging.warning(f"undefined FETCH filter: {f.type}")
|
||||
if not apply:
|
||||
continue
|
||||
elif event_type == schemas.EventType.graphql:
|
||||
|
|
@ -869,7 +912,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
event_where.append(
|
||||
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
|
||||
else:
|
||||
print(f"undefined GRAPHQL filter: {f.type}")
|
||||
logging.warning(f"undefined GRAPHQL filter: {f.type}")
|
||||
else:
|
||||
continue
|
||||
if event_index == 0 or or_events:
|
||||
|
|
@@ -1,7 +1,12 @@
from decouple import config
import logging

import requests
from decouple import config

from chalicelib.utils import helper

logger = logging.getLogger(__name__)


def __get_captcha_config():
    return config("captcha_server"), config("captcha_key")
@@ -9,16 +14,16 @@ def __get_captcha_config():

def is_valid(response):
    if not helper.allow_captcha():
        print("!! Captcha is disabled")
        logger.info("!! Captcha is disabled")
        return True
    url, secret = __get_captcha_config()
    r = requests.post(url=url, data={"secret": secret, "response": response})
    if r.status_code != 200:
        print("something went wrong")
        print(r)
        print(r.status_code)
        print(r.text)
        logger.warning("something went wrong")
        logger.error(r)
        logger.warning(r.status_code)
        logger.warning(r.text)
        return
    r = r.json()
    print(r)
    logger.debug(r)
    return r["success"]
@@ -2,6 +2,9 @@ from functools import wraps
from time import time
import inspect
from chalicelib.utils import helper
import logging

logger = logging.getLogger(__name__)


def timed(f):
@@ -13,7 +16,7 @@ def timed(f):
        result = f(*args, **kwds)
        elapsed = time() - start
        if inspect.stack()[1][3] == "_view_func":
            print("DEBUG: %s: took %d s to finish" % (f.__name__, elapsed))
            logging.debug("%s: took %d s to finish" % (f.__name__, elapsed))
        else:
            call_stack = [i[3] for i in inspect.stack()[1:] if i[3] != "wrapper"]
            call_stack = [c for c in call_stack if
@@ -22,7 +25,7 @@ def timed(f):
                          '_bootstrap', '_main_rest_api_handler', '_user_handler',
                          '_get_view_function_response', 'wrapped_event', 'handle_one_request',
                          '_global_error_handler', 'openreplay_middleware']]
            print("DEBUG: %s > %s took %d s to finish" % (" > ".join(call_stack), f.__name__, elapsed))
            logger.debug("%s > %s took %d s to finish" % (" > ".join(call_stack), f.__name__, elapsed))
        return result

    return wrapper
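The timed decorator now emits its measurements through logging.debug / logger.debug instead of print, so the per-call timing lines only appear when DEBUG logging is enabled. Usage is unchanged; a hypothetical example (the import path is assumed, not stated in the diff):

import logging

from chalicelib.utils.dev import timed  # assumed module path for the decorator above

logging.basicConfig(level=logging.DEBUG)  # timing lines are DEBUG-only now


@timed
def build_report(project_id: int):
    return {"project_id": project_id}


build_report(42)  # expected to log "... build_report took N s to finish" at DEBUG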
@@ -10,9 +10,7 @@ from decouple import config

from chalicelib.utils import smtp

loglevel = config("LOGLEVEL", default=logging.INFO)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)
logger = logging.getLogger(__name__)


def __get_subject(subject):
|
|||
|
|
@ -1,8 +1,11 @@
|
|||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class github_formatters:
|
||||
|
||||
|
|
@ -122,8 +125,8 @@ class githubV3Request:
|
|||
pages = get_response_links(response)
|
||||
result = response.json()
|
||||
if response.status_code != 200:
|
||||
print(f"=>GITHUB Exception")
|
||||
print(result)
|
||||
logger.warning(f"=>GITHUB Exception")
|
||||
logger.error(result)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"GITHUB: {result['message']}")
|
||||
if isinstance(result, dict):
|
||||
return result
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import logging
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
|
|
@ -7,6 +8,7 @@ from jira import JIRA
|
|||
from jira.exceptions import JIRAError
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
|
||||
|
||||
|
||||
|
|
@ -19,8 +21,8 @@ class JiraManager:
|
|||
try:
|
||||
self._jira = JIRA(url, basic_auth=(username, password), logging=True, max_retries=0, timeout=3)
|
||||
except Exception as e:
|
||||
print("!!! JIRA AUTH ERROR")
|
||||
print(e)
|
||||
logger.warning("!!! JIRA AUTH ERROR")
|
||||
logger.error(e)
|
||||
raise e
|
||||
|
||||
def set_jira_project_id(self, project_id):
|
||||
|
|
@ -34,7 +36,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_projects()
|
||||
print(f"=>JIRA Exception {e.text}")
|
||||
logger.error(f"=>JIRA Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
projects_dict_list = []
|
||||
for project in projects:
|
||||
|
|
@ -50,7 +52,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_project()
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
return self.__parser_project_info(project)
|
||||
|
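Every JiraManager call follows the same recovery pattern, now with logger.error instead of print: on a JIRAError with a 4xx status it sleeps one second and retries once while a retry budget remains, and otherwise converts the failure into an HTTP 400 for the API caller. The shape of that pattern, condensed into one method (field names follow the diff; the class skeleton and retry bookkeeping are illustrative):

import logging
import time

from fastapi import HTTPException, status
from jira import JIRA
from jira.exceptions import JIRAError

logger = logging.getLogger(__name__)


class JiraClientSketch:
    def __init__(self, url, username, password):
        self.retries = 1
        self._jira = JIRA(url, basic_auth=(username, password), max_retries=0, timeout=3)

    def get_projects(self):
        try:
            projects = self._jira.projects()
        except JIRAError as e:
            if (e.status_code // 100) == 4 and self.retries > 0:
                self.retries -= 1           # assumed bookkeeping; the hunks only show the check
                time.sleep(1)
                return self.get_projects()  # retry once after a short pause
            logger.error(f"=>JIRA Exception {e.text}")
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
        return projects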
||||
|
|
@ -66,12 +68,11 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_issues(sql, offset)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
||||
issue_dict_list = []
|
||||
for issue in issues:
|
||||
# print(issue.raw)
|
||||
issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False))
|
||||
|
||||
# return {"total": issues.total, "issues": issue_dict_list}
|
||||
|
|
@ -86,7 +87,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_issue(issue_id)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
return self.__parser_issue_info(issue)
|
||||
|
||||
|
|
@ -106,7 +107,7 @@ class JiraManager:
|
|||
if self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_issue_v3(issue_id)
|
||||
print(f"=>Exception {e}")
|
||||
logger.error(f"=>Exception {e}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: get issue error")
|
||||
return self.__parser_issue_info(issue.json())
|
||||
|
||||
|
|
@ -120,7 +121,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.create_issue(issue_dict)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
||||
def close_issue(self, issue):
|
||||
|
|
@ -132,7 +133,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.close_issue(issue)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
||||
def assign_issue(self, issue_id, account_id) -> bool:
|
||||
|
|
@ -143,7 +144,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.assign_issue(issue_id, account_id)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
||||
def add_comment(self, issue_id: str, comment: str):
|
||||
|
|
@ -154,7 +155,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.add_comment(issue_id, comment)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
return self.__parser_comment_info(comment)
|
||||
|
||||
|
|
@ -191,7 +192,7 @@ class JiraManager:
|
|||
if self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.add_comment_v3(issue_id, comment)
|
||||
print(f"=>Exception {e}")
|
||||
logger.error(f"=>Exception {e}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: comment error")
|
||||
return self.__parser_comment_info(comment_response.json())
|
||||
|
||||
|
|
@ -207,7 +208,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_comments(issueKey)
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
||||
def get_meta(self):
|
||||
|
|
@ -224,7 +225,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_assignable_users()
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
if e.status_code == 401:
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="JIRA: 401 Unauthorized")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
|
|
@ -247,7 +248,7 @@ class JiraManager:
|
|||
if (e.status_code // 100) == 4 and self.retries > 0:
|
||||
time.sleep(1)
|
||||
return self.get_issue_types()
|
||||
print(f"=>Exception {e.text}")
|
||||
logger.error(f"=>Exception {e.text}")
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
|
||||
types_dict = []
|
||||
for type in types:
|
||||
|
|
@@ -7,8 +7,7 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
logger = logging.getLogger(__name__)

_PG_CONFIG = {"host": config("pg_host"),
              "database": config("pg_dbname"),
@@ -41,8 +40,8 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
            self._semaphore.release()
        except psycopg2.pool.PoolError as e:
            if str(e) == "trying to put unkeyed connection":
                print("!!! trying to put unkeyed connection")
                print(f"env-PG_POOL:{config('PG_POOL', default=None)}")
                logger.warning("!!! trying to put unkeyed connection")
                logger.warning(f"env-PG_POOL:{config('PG_POOL', default=None)}")
                return
            raise e
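ORThreadedConnectionPool wraps psycopg2's ThreadedConnectionPool with a semaphore so the pool blocks rather than erroring when exhausted, and releases the slot on putconn; the "trying to put unkeyed connection" case is now logged as a warning instead of printed. A stripped-down sketch of that pattern (the acquire on getconn is the assumed counterpart of the release shown in the hunk; names are illustrative):

import logging
from threading import Semaphore

import psycopg2.pool

logger = logging.getLogger(__name__)


class GuardedPool(psycopg2.pool.ThreadedConnectionPool):
    def __init__(self, minconn, maxconn, *args, **kwargs):
        self._semaphore = Semaphore(maxconn)
        super().__init__(minconn, maxconn, *args, **kwargs)

    def getconn(self, *args, **kwargs):
        self._semaphore.acquire()  # block instead of raising PoolError when exhausted
        return super().getconn(*args, **kwargs)

    def putconn(self, *args, **kwargs):
        try:
            super().putconn(*args, **kwargs)
            self._semaphore.release()
        except psycopg2.pool.PoolError as e:
            if str(e) == "trying to put unkeyed connection":
                logger.warning("!!! trying to put unkeyed connection")
                return
            raise e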
|
|
|||
|
|
@ -118,7 +118,7 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(..
|
|||
# @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
|
||||
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)}
|
||||
|
||||
|
||||
|
|
@ -216,9 +216,9 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
|
|||
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
|
||||
def get_custom_metric_errors_list(projectId: int, metric_id: int,
|
||||
data: schemas.CardSessionsSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def get_card_errors_list(projectId: int, metric_id: int,
|
||||
data: schemas.CardSessionsSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
|
||||
metric_id=metric_id, data=data)
|
||||
if data is None:
|
||||
|
|
@ -241,8 +241,8 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem
|
|||
# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
|
||||
if data is None:
|
||||
return {"errors": ["custom metric not found"]}
|
||||
|
|
@ -254,9 +254,9 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchem
|
|||
# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
|
||||
# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
|
||||
def update_custom_metric_state(projectId: int, metric_id: int,
|
||||
data: schemas.UpdateCardStatusSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def update_card_state(projectId: int, metric_id: int,
|
||||
data: schemas.UpdateCardStatusSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {
|
||||
"data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
|
||||
status=data.active)}
|
||||
|
|
@ -265,6 +265,6 @@ def update_custom_metric_state(projectId: int, metric_id: int,
|
|||
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
|
||||
# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
|
||||
# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def delete_card(projectId: int, metric_id: int, _=Body(None),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
|
||||
|
|
@@ -1,3 +1,5 @@
import logging

from fastapi import Request
from starlette import status
from starlette.exceptions import HTTPException
@@ -6,6 +8,8 @@ import schemas
from chalicelib.core import projects
from or_dependencies import OR_context

logger = logging.getLogger(__name__)


class ProjectAuthorizer:
    def __init__(self, project_identifier):
@@ -24,6 +28,6 @@ class ProjectAuthorizer:
                and not projects.is_authorized(
                    project_id=projects.get_internal_project_id(value),
                    tenant_id=current_user.tenant_id, user_id=user_id)):
            print("unauthorized project")
            print(value)
            logger.debug("unauthorized project")
            logger.debug(value)
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="unauthorized project.")
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import json
|
||||
import logging
|
||||
from typing import Union
|
||||
|
||||
from decouple import config
|
||||
|
|
@ -12,7 +13,7 @@ from chalicelib.utils.TimeUTC import TimeUTC
|
|||
from chalicelib.utils.storage import StorageClient, extra
|
||||
|
||||
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||
print(">>> Using experimental error search")
|
||||
logging.info(">>> Using experimental error search")
|
||||
from . import errors_exp as errors
|
||||
else:
|
||||
from . import errors as errors
|
||||
|
|
@ -22,6 +23,7 @@ if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
|
|||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
PIE_CHART_GROUP = 5
|
||||
|
||||
|
||||
|
|
@ -87,7 +89,7 @@ def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
|
|||
|
||||
def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
|
||||
if len(data.series) == 0:
|
||||
print("empty series")
|
||||
logger.debug("empty series")
|
||||
return {
|
||||
"total": 0,
|
||||
"sessions": []
|
||||
|
|
@ -108,11 +110,6 @@ def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, inclu
|
|||
include_mobs=include_mobs)
|
||||
|
||||
|
||||
# EE only
|
||||
def __is_insights(data: schemas.CardSchema):
|
||||
return data.metric_type == schemas.MetricType.insights
|
||||
|
||||
|
||||
# EE only
|
||||
def __get_insights_chart(project_id: int, data: schemas.CardInsights, user_id: int = None):
|
||||
return sessions_insights.fetch_selected(project_id=project_id,
|
||||
|
|
@ -317,7 +314,7 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
|
|||
|
||||
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
|
||||
if len(data.series) == 0:
|
||||
return {"data": []}
|
||||
return {"data": {}}
|
||||
card_table = schemas.CardTable(
|
||||
startTimestamp=data.startTimestamp,
|
||||
endTimestamp=data.endTimestamp,
|
||||
|
|
@ -334,12 +331,12 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
|
|||
card_table.series[0].filter.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
|
||||
operator=s.operator,
|
||||
value=s.value))
|
||||
for s in data.exclude:
|
||||
for s in data.excludes:
|
||||
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
|
||||
operator=schemas.SearchEventOperator._not_on,
|
||||
value=s.value))
|
||||
|
||||
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
|
||||
result = __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
|
||||
return result[0] if len(result) > 0 else {}
|
||||
|
||||
|
||||
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
|
||||
|
|
@ -361,8 +358,7 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
|
|||
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
|
||||
r = {"start_point": [s.model_dump() for s in data.start_point],
|
||||
"start_type": data.start_type,
|
||||
"exclude": [e.model_dump() for e in data.exclude]}
|
||||
print(r)
|
||||
"exclude": [e.model_dump() for e in data.excludes]}
|
||||
return r
|
||||
|
||||
|
||||
|
|
@ -383,8 +379,8 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
|
|||
try:
|
||||
extra.tag_session(file_key=k, tag_value=tag)
|
||||
except Exception as e:
|
||||
print(f"!!!Error while tagging: {k} to {tag} for clickMap")
|
||||
print(str(e))
|
||||
logger.warning(f"!!!Error while tagging: {k} to {tag} for clickMap")
|
||||
logger.error(str(e))
|
||||
session_data = json.dumps(session_data)
|
||||
_data = {"session_data": session_data}
|
||||
for i, s in enumerate(data.series):
|
||||
|
|
@ -596,8 +592,8 @@ def delete_card(project_id, metric_id, user_id):
|
|||
try:
|
||||
extra.tag_session(file_key=k, tag_value=tag)
|
||||
except Exception as e:
|
||||
print(f"!!!Error while tagging: {k} to {tag} for clickMap")
|
||||
print(str(e))
|
||||
logger.warning(f"!!!Error while tagging: {k} to {tag} for clickMap")
|
||||
logger.error(str(e))
|
||||
return {"state": "success"}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -3,44 +3,19 @@ from typing import List
|
|||
import schemas
|
||||
from chalicelib.core.metrics import __get_basic_constraints, __get_meta_constraint
|
||||
from chalicelib.core.metrics import __get_constraint_values, __complete_missing_steps
|
||||
from chalicelib.utils import ch_client
|
||||
from chalicelib.utils import ch_client, exp_ch_helper
|
||||
from chalicelib.utils import helper, dev
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.core import metadata
|
||||
from time import time
|
||||
|
||||
import logging
|
||||
|
||||
def __transform_journey2(rows, reverse_path=False):
|
||||
# nodes should contain duplicates for different steps otherwise the UI crashes
|
||||
nodes = []
|
||||
nodes_values = []
|
||||
links = []
|
||||
for r in rows:
|
||||
source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}"
|
||||
if source not in nodes:
|
||||
nodes.append(source)
|
||||
nodes_values.append({"name": r['e_value'], "eventType": r['event_type']})
|
||||
if r['next_value']:
|
||||
target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}"
|
||||
if target not in nodes:
|
||||
nodes.append(target)
|
||||
nodes_values.append({"name": r['next_value'], "eventType": r['next_type']})
|
||||
link = {"eventType": r['event_type'], "value": r["sessions_count"],
|
||||
"avgTimeToTarget": r["avg_time_to_target"]}
|
||||
if not reverse_path:
|
||||
link["source"] = nodes.index(source)
|
||||
link["target"] = nodes.index(target)
|
||||
else:
|
||||
link["source"] = nodes.index(target)
|
||||
link["target"] = nodes.index(source)
|
||||
links.append(link)
|
||||
|
||||
return {"nodes": nodes_values,
|
||||
"links": sorted(links, key=lambda x: (x["source"], x["target"]), reverse=False)}
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __transform_journey3(rows, reverse_path=False):
|
||||
def __transform_journey(rows, reverse_path=False):
|
||||
total_100p = 0
|
||||
number_of_step1 = 0
|
||||
for r in rows:
|
||||
|
|
@ -49,10 +24,10 @@ def __transform_journey3(rows, reverse_path=False):
|
|||
number_of_step1 += 1
|
||||
total_100p += r["sessions_count"]
|
||||
for i in range(number_of_step1):
|
||||
rows[i]["value"] = round(number=100 / number_of_step1, ndigits=2)
|
||||
rows[i]["value"] = 100 / number_of_step1
|
||||
|
||||
for i in range(number_of_step1, len(rows)):
|
||||
rows[i]["value"] = round(number=rows[i]["sessions_count"] * 100 / total_100p, ndigits=2)
|
||||
rows[i]["value"] = rows[i]["sessions_count"] * 100 / total_100p
|
||||
|
||||
nodes = []
|
||||
nodes_values = []
|
||||
|
|
@ -89,9 +64,12 @@ JOURNEY_TYPES = {
|
|||
}
|
||||
|
||||
|
||||
# query: Q3, the result is correct,
|
||||
# query: Q4, the result is correct,
|
||||
# startPoints are computed before ranked_events to reduce the number of window functions over rows
|
||||
# replaced time_to_target by time_from_previous
|
||||
# compute avg_time_from_previous at the same level as sessions_count
|
||||
# sort by top 5 according to sessions_count at the CTE level
|
||||
# final part project data without grouping
|
||||
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
||||
sub_events = []
|
||||
start_points_conditions = []
|
||||
|
|
@ -307,7 +285,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
selected_event_type_sub_query = " OR ".join(selected_event_type_sub_query)
|
||||
ch_sub_query.append(f"({selected_event_type_sub_query})")
|
||||
|
||||
main_table = "experimental.events"
|
||||
main_table = exp_ch_helper.get_main_events_table(data.startTimestamp)
|
||||
if len(sessions_conditions) > 0:
|
||||
sessions_conditions.append(f"sessions.project_id = %(project_id)s")
|
||||
sessions_conditions.append(f"sessions.datetime >= toDateTime(%(startTimestamp)s / 1000)")
|
||||
|
|
@ -347,22 +325,22 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
time_from_previous,
|
||||
count(1) AS sessions_count
|
||||
AVG(time_from_previous) AS avg_time_from_previous,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 1
|
||||
AND isNotNull(next_value)
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, time_from_previous
|
||||
LIMIT 5)"""]
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT %(eventThresholdNumberInGroup)s)"""]
|
||||
projection_query = ["""SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count,
|
||||
avg(time_from_previous) AS avg_time_from_previous
|
||||
FROM n1
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count"""]
|
||||
avg_time_from_previous
|
||||
FROM n1"""]
|
||||
for i in range(2, data.density):
|
||||
steps_query.append(f"""n{i} AS (SELECT *
|
||||
FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
|
|
@ -370,13 +348,12 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
re.e_value AS e_value,
|
||||
re.next_type AS next_type,
|
||||
re.next_value AS next_value,
|
||||
re.time_from_previous AS time_from_previous,
|
||||
count(1) AS sessions_count
|
||||
AVG(re.time_from_previous) AS avg_time_from_previous,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM n{i - 1} INNER JOIN ranked_events AS re
|
||||
ON (n{i - 1}.next_value = re.e_value AND n{i - 1}.next_type = re.event_type)
|
||||
WHERE re.event_number_in_session = {i}
|
||||
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value,
|
||||
re.time_from_previous) AS sub_level
|
||||
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value) AS sub_level
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT %(eventThresholdNumberInGroup)s)""")
|
||||
projection_query.append(f"""SELECT event_number_in_session,
|
||||
|
|
@ -385,55 +362,12 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
|
|||
next_type,
|
||||
next_value,
|
||||
sessions_count,
|
||||
avg(time_from_previous) AS avg_time_from_previous
|
||||
FROM n{i}
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count""")
|
||||
avg_time_from_previous
|
||||
FROM n{i}""")
|
||||
|
||||
with ch_client.ClickHouseClient(database="experimental") as ch:
|
||||
time_key = TimeUTC.now()
|
||||
_now = time()
|
||||
ch_query = f"""\
|
||||
WITH full_ranked_events AS (SELECT session_id,
|
||||
event_type,
|
||||
{main_column} AS e_value,
|
||||
row_number() OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction}) AS event_number_in_session,
|
||||
leadInFrame(label)
|
||||
OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS
|
||||
BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
|
||||
leadInFrame(toNullable(event_type))
|
||||
OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS
|
||||
BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type,
|
||||
abs(leadInFrame(toNullable(datetime))
|
||||
OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS
|
||||
BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) -
|
||||
events.datetime) AS time_to_next
|
||||
FROM {main_table}
|
||||
WHERE {" AND ".join(ch_sub_query)})
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count,
|
||||
avg(time_to_next) AS avg_time_to_target
|
||||
FROM (SELECT *
|
||||
FROM (SELECT *,
|
||||
row_number()
|
||||
OVER (PARTITION BY event_number_in_session, event_type, e_value ORDER BY sessions_count DESC ) AS _event_number_in_group
|
||||
FROM (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
time_to_next,
|
||||
count(1) AS sessions_count
|
||||
FROM ({start_points_subquery}) AS start_points
|
||||
INNER JOIN full_ranked_events USING (session_id)
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value,
|
||||
time_to_next) AS groupped_events) AS ranked_groupped_events
|
||||
WHERE _event_number_in_group < 9) AS limited_events
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
|
||||
ORDER BY event_number_in_session, e_value, next_value;"""
|
||||
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
|
||||
"endTimestamp": data.endTimestamp, "density": data.density,
|
||||
"eventThresholdNumberInGroup": 6 if data.hide_excess else 8,
|
||||
|
|
@ -456,6 +390,12 @@ WITH pre_ranked_events AS (SELECT *
|
|||
SELECT *
|
||||
FROM pre_ranked_events;"""
|
||||
ch.execute(query=ch_query1, params=params)
|
||||
if time() - _now > 2:
|
||||
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
|
||||
logger.info("---------Q1-----------")
|
||||
logger.info(ch.format(ch_query1, params))
|
||||
logger.info("----------------------")
|
||||
_now = time()
|
||||
|
||||
ch_query2 = f"""\
|
||||
CREATE TEMPORARY TABLE ranked_events_{time_key} AS
|
||||
|
|
@ -477,6 +417,12 @@ WITH pre_ranked_events AS (SELECT *
|
|||
SELECT *
|
||||
FROM ranked_events;"""
|
||||
ch.execute(query=ch_query2, params=params)
|
||||
if time() - _now > 2:
|
||||
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
|
||||
logger.info("---------Q2-----------")
|
||||
logger.info(ch.format(ch_query2, params))
|
||||
logger.info("----------------------")
|
||||
_now = time()
|
||||
|
||||
ch_query3 = f"""\
|
||||
WITH ranked_events AS (SELECT *
|
||||
|
|
@ -488,17 +434,12 @@ ORDER BY event_number_in_session;"""
|
|||
rows = ch.execute(query=ch_query3, params=params)
|
||||
|
||||
if time() - _now > 2:
|
||||
print(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
|
||||
print("----------------------")
|
||||
print("---------Q1-----------")
|
||||
print(ch.format(ch_query1, params))
|
||||
print("---------Q2-----------")
|
||||
print(ch.format(ch_query2, params))
|
||||
print("---------Q3-----------")
|
||||
print(ch.format(ch_query3, params))
|
||||
print("----------------------")
|
||||
logger.info(f">>>>>>>>>PathAnalysis long query EE ({int(time() - _now)}s)<<<<<<<<<")
|
||||
logger.info("---------Q3-----------")
|
||||
logger.info(ch.format(ch_query3, params))
|
||||
logger.info("----------------------")
|
||||
|
||||
return __transform_journey3(rows=rows, reverse_path=reverse)
|
||||
return __transform_journey(rows=rows, reverse_path=reverse)
|
||||
|
||||
#
|
||||
# def __compute_weekly_percentage(rows):
|
||||
|
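The EE ClickHouse variant stages pre_ranked_events and ranked_events in temporary tables and runs three queries in sequence; each step is timed, and only queries slower than a couple of seconds get their formatted SQL written through logger.info, replacing the previous print block. A generic sketch of that timing guard (the threshold and query are placeholders; ch.execute and ch.format refer to the project's ClickHouse client wrapper as used above):

import logging
from time import time

logger = logging.getLogger(__name__)

LONG_QUERY_THRESHOLD_S = 2  # assumed threshold, matching the `> 2` checks above


def execute_timed(ch, query: str, params: dict):
    _now = time()
    rows = ch.execute(query=query, params=params)
    if time() - _now > LONG_QUERY_THRESHOLD_S:
        # Only slow queries are expanded in the logs, keeping normal runs quiet.
        logger.info(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
        logger.info(ch.format(query, params))
    return rows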
|
|
|||
|
|
@ -4,7 +4,9 @@ from typing import List, Union
|
|||
import schemas
|
||||
from chalicelib.core import events, metadata, projects, performance_event, metrics
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
SESSION_PROJECTION_COLS_CH = """\
|
||||
s.project_id,
|
||||
s.session_id AS session_id,
|
||||
|
|
@ -24,6 +26,7 @@ s.pages_count AS pages_count,
|
|||
s.errors_count AS errors_count,
|
||||
s.user_anonymous_id AS user_anonymous_id,
|
||||
s.platform AS platform,
|
||||
s.timezone AS timezone,
|
||||
coalesce(issue_score,0) AS issue_score,
|
||||
s.issue_types AS issue_types
|
||||
"""
|
||||
|
|
@ -47,6 +50,7 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
|
|||
'errors_count', toString(s.errors_count),
|
||||
'user_anonymous_id', toString(s.user_anonymous_id),
|
||||
'platform', toString(s.platform),
|
||||
'timezone', toString(s.timezone),
|
||||
'issue_score', toString(coalesce(issue_score,0)),
|
||||
'viewed', toString(viewed_sessions.session_id > 0)
|
||||
"""
|
||||
|
|
@ -77,17 +81,6 @@ def __reverse_sql_operator(op):
|
|||
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
|
||||
|
||||
|
||||
def __get_sql_operator_multiple(op: schemas.SearchEventOperator):
|
||||
return " IN " if op not in [schemas.SearchEventOperator._is_not, schemas.SearchEventOperator._not_on,
|
||||
schemas.SearchEventOperator._not_contains] else " NOT IN "
|
||||
|
||||
|
||||
def __get_sql_value_multiple(values):
|
||||
if isinstance(values, tuple):
|
||||
return values
|
||||
return tuple(values) if isinstance(values, list) else (values,)
|
||||
|
||||
|
||||
def _multiple_conditions(condition, values, value_key="value", is_not=False):
|
||||
query = []
|
||||
for i in range(len(values)):
|
||||
|
|
@ -133,9 +126,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
|
|||
meta_keys = []
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
if errors_only:
|
||||
# print("--------------------QP")
|
||||
# print(cur.format(query_part, full_args))
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------QP")
|
||||
logging.debug(cur.format(query_part, full_args))
|
||||
logging.debug("--------------------")
|
||||
main_query = cur.format(f"""SELECT DISTINCT er.error_id,
|
||||
COALESCE((SELECT TRUE
|
||||
FROM {exp_ch_helper.get_user_viewed_errors_table()} AS ve
|
||||
|
|
@ -212,28 +205,24 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
|
|||
ORDER BY sort_key {data.order}
|
||||
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
|
||||
full_args)
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
try:
|
||||
sessions = cur.execute(main_query)
|
||||
except Exception as err:
|
||||
print("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
|
||||
print(main_query)
|
||||
print("--------- PAYLOAD -----------")
|
||||
print(data.json())
|
||||
print("--------------------")
|
||||
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(main_query)
|
||||
logging.warning("--------- PAYLOAD -----------")
|
||||
logging.warning(data.json())
|
||||
logging.warning("--------------------")
|
||||
raise err
|
||||
if errors_only or ids_only:
|
||||
return helper.list_to_camel_case(sessions)
|
||||
|
||||
if len(sessions) > 0:
|
||||
sessions = sessions[0]
|
||||
# if count_only:
|
||||
# return helper.dict_to_camel_case(sessions)
|
||||
# for s in sessions:
|
||||
# print(s)
|
||||
# s["session_id"] = str(s["session_id"])
|
||||
|
||||
total = sessions["count"]
|
||||
sessions = sessions["sessions"]
|
||||
|
||||
|
|
@ -294,9 +283,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
main_query = cur.format(f"""SELECT count(DISTINCT s.session_id) AS count
|
||||
{query_part};""", full_args)
|
||||
|
||||
# print("--------------------")
|
||||
# print(main_query)
|
||||
# print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
sessions = cur.execute(main_query)
|
||||
if view_type == schemas.MetricTimeseriesViewType.line_chart:
|
||||
sessions = metrics.__complete_missing_steps(start_time=data.startTimestamp, end_time=data.endTimestamp,
|
||||
|
|
@ -346,9 +335,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
ORDER BY session_count DESC
|
||||
LIMIT %(limit_e)s OFFSET %(limit_s)s;""",
|
||||
full_args)
|
||||
print("--------------------")
|
||||
print(main_query)
|
||||
print("--------------------")
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
sessions = cur.execute(main_query)
|
||||
# cur.fetchone()
|
||||
count = 0
|
||||
|
|
@ -1035,7 +1024,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
apply = True
|
||||
else:
|
||||
print(f"undefined FETCH filter: {f.type}")
|
||||
logging.warning(f"undefined FETCH filter: {f.type}")
|
||||
if not apply:
|
||||
continue
|
||||
else:
|
||||
|
|
@ -1072,7 +1061,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
else:
|
||||
print(f"undefined GRAPHQL filter: {f.type}")
|
||||
logging.warning(f"undefined GRAPHQL filter: {f.type}")
|
||||
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
|
||||
else:
|
||||
continue
|
||||
|
|
@ -1426,12 +1415,6 @@ def get_session_user(project_id, user_id):
|
|||
return helper.dict_to_camel_case(data)
|
||||
|
||||
|
||||
def count_all():
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
row = cur.execute(query=f"SELECT COUNT(session_id) AS count FROM {exp_ch_helper.get_main_sessions_table()}")
|
||||
return row.get("count", 0)
|
||||
|
||||
|
||||
def session_exists(project_id, session_id):
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
query = cur.format(f"""SELECT 1
|
||||
|
|
@@ -71,7 +71,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
        query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
                                FROM sessions_notes INNER JOIN users USING (user_id)
                                WHERE {" AND ".join(conditions)}
                                ORDER BY created_at {data.order.value}
                                ORDER BY created_at {data.order}
                                LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
|
|
|||
|
|
@ -216,9 +216,9 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
|
|||
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
|
||||
def get_custom_metric_errors_list(projectId: int, metric_id: int,
|
||||
data: schemas.CardSessionsSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def get_card_errors_list(projectId: int, metric_id: int,
|
||||
data: schemas.CardSessionsSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
|
||||
metric_id=metric_id, data=data)
|
||||
if data is None:
|
||||
|
|
@ -241,8 +241,8 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem
|
|||
# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
|
||||
if data is None:
|
||||
return {"errors": ["custom metric not found"]}
|
||||
|
|
@ -254,9 +254,9 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchem
|
|||
# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
|
||||
# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
|
||||
# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
|
||||
def update_custom_metric_state(projectId: int, metric_id: int,
|
||||
data: schemas.UpdateCardStatusSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def update_card_state(projectId: int, metric_id: int,
|
||||
data: schemas.UpdateCardStatusSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {
|
||||
"data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
|
||||
status=data.active)}
|
||||
|
|
@ -265,6 +265,6 @@ def update_custom_metric_state(projectId: int, metric_id: int,
|
|||
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
|
||||
# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
|
||||
# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
|
||||
def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
def delete_card(projectId: int, metric_id: int, _=Body(None),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
|
||||
|
|
|
|||