Merge branch 'dev' into live-se-red

Commit 2c82ed81cc
90 changed files with 2715 additions and 2340 deletions
@@ -1,8 +1,8 @@
-from chalicelib.utils import pg_client
 from chalicelib.core import projects
-from chalicelib.core.log_tools import datadog, stackdriver, sentry
-
 from chalicelib.core import users
+from chalicelib.core.log_tools import datadog, stackdriver, sentry
+from chalicelib.core.modules import TENANT_CONDITION
+from chalicelib.utils import pg_client


 def get_state(tenant_id):
@@ -21,21 +21,23 @@ def get_state(tenant_id):
         recorded = cur.fetchone()["exists"]
         meta = False
         if recorded:
-            cur.execute("""SELECT EXISTS((SELECT 1
+            query = cur.mogrify(f"""SELECT EXISTS((SELECT 1
                                           FROM public.projects AS p
                                                LEFT JOIN LATERAL ( SELECT 1
                                                    FROM public.sessions
                                                    WHERE sessions.project_id = p.project_id
                                                      AND sessions.user_id IS NOT NULL
                                                    LIMIT 1) AS sessions(user_id) ON (TRUE)
-                                          WHERE p.deleted_at ISNULL
+                                          WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL
                                            AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
                                                OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
                                                OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
                                                OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
                                                OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
                                                OR p.metadata_10 IS NOT NULL )
-                                          )) AS exists;""")
+                                          )) AS exists;""",
+                                {"tenant_id": tenant_id})
+            cur.execute(query)

             meta = cur.fetchone()["exists"]

@@ -78,21 +80,23 @@ def get_state_installing(tenant_id):

 def get_state_identify_users(tenant_id):
     with pg_client.PostgresClient() as cur:
-        cur.execute("""SELECT EXISTS((SELECT 1
+        query = cur.mogrify(f"""SELECT EXISTS((SELECT 1
                                       FROM public.projects AS p
                                            LEFT JOIN LATERAL ( SELECT 1
                                                FROM public.sessions
                                                WHERE sessions.project_id = p.project_id
                                                  AND sessions.user_id IS NOT NULL
                                                LIMIT 1) AS sessions(user_id) ON (TRUE)
-                                      WHERE p.deleted_at ISNULL
+                                      WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL
                                        AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
                                            OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
                                            OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
                                            OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
                                            OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
                                            OR p.metadata_10 IS NOT NULL )
-                                      )) AS exists;""")
+                                      )) AS exists;""",
+                            {"tenant_id": tenant_id})
+        cur.execute(query)

         meta = cur.fetchone()["exists"]

api/chalicelib/core/errors/__init__.py (new file, +12)

import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental error search")
    from . import errors as errors_legacy
    from . import errors_ch as errors
else:
    from . import errors

@@ -1,7 +1,8 @@
 import json

 import schemas
-from chalicelib.core import sourcemaps, sessions
+from chalicelib.core import sourcemaps
+from chalicelib.core.errors.modules import sessions
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
@@ -1,7 +1,7 @@
 from decouple import config

 import schemas
-from chalicelib.core import errors_legacy
+from . import errors as errors_legacy
 from chalicelib.core import metrics, metadata
 from chalicelib.core import sessions
 from chalicelib.utils import ch_client, exp_ch_helper
@@ -151,9 +151,6 @@ def __process_tags_map(row):


 def get_details(project_id, error_id, user_id, **data):
-    if not config("EXP_ERRORS_GET", cast=bool, default=False):
-        return errors_legacy.get_details(project_id, error_id, user_id, **data)
-
     MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0)
     MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0)
     MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
@@ -167,7 +164,6 @@ def get_details(project_id, error_id, user_id, **data):
     ch_basic_query = __get_basic_constraints(time_constraint=False)
     ch_basic_query.append("error_id = %(error_id)s")
-

     with ch_client.ClickHouseClient() as ch:
         data["startDate24"] = TimeUTC.now(-1)
         data["endDate24"] = TimeUTC.now()
api/chalicelib/core/errors/modules/__init__.py (new file, +10)

import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
    from chalicelib.core.sessions import sessions_ch as sessions
else:
    from chalicelib.core.sessions import sessions

@@ -1,5 +1,5 @@
 import schemas
-from chalicelib.core.issue_tracking.modules import TENANT_CONDITION
+from chalicelib.core.modules import TENANT_CONDITION
 from chalicelib.utils import pg_client


@@ -1,6 +1,6 @@
 from chalicelib.utils import pg_client, helper
 import json
-from chalicelib.core.log_tools.modules import TENANT_CONDITION
+from chalicelib.core.modules import TENANT_CONDITION

 EXCEPT = ["jira_server", "jira_cloud"]

@@ -1 +0,0 @@
-TENANT_CONDITION = "TRUE"

api/chalicelib/core/metrics/__init__.py (new file, +15)

import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_METRICS", cast=bool, default=False):
    logger.info(">>> Using experimental metrics")
    from chalicelib.core.metrics import heatmaps_ch as heatmaps
    from chalicelib.core.metrics import metrics_ch as metrics
    from chalicelib.core.metrics import product_analytics_ch as product_analytics
else:
    from chalicelib.core.metrics import heatmaps
    from chalicelib.core.metrics import metrics
    from chalicelib.core.metrics import product_analytics

@@ -4,7 +4,8 @@ import logging
 from fastapi import HTTPException, status

 import schemas
-from chalicelib.core import funnels, errors, issues, heatmaps, product_analytics, custom_metrics_predefined
+from chalicelib.core import errors, issues
+from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined
 from chalicelib.core.sessions import sessions
 from chalicelib.utils import helper, pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
@@ -2,7 +2,7 @@ import logging
 from typing import Union

 import schemas
-from chalicelib.core import metrics
+from chalicelib.core.metrics import metrics

 logger = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 import json

 import schemas
-from chalicelib.core import custom_metrics
+from chalicelib.core.metrics import custom_metrics
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
@@ -1,7 +1,7 @@
 from typing import List

 import schemas
-from chalicelib.core import significance
+from chalicelib.core.metrics.modules import significance
 from chalicelib.utils import helper
 from chalicelib.utils import sql_helper as sh

api/chalicelib/core/metrics/heatmaps_ch.py (new file, +370)

import logging

from decouple import config

import schemas
from chalicelib.core import events
from chalicelib.core.metrics.modules import sessions, sessions_mobs
from chalicelib.utils import sql_helper as sh

from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper

logger = logging.getLogger(__name__)


def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
    if data.url is None or data.url == "":
        return []
    args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
            "project_id": project_id, "url": data.url}
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.datetime >= toDateTime(%(startDate)s/1000)",
                   "main_events.datetime <= toDateTime(%(endDate)s/1000)",
                   "main_events.event_type='CLICK'",
                   "isNotNull(main_events.normalized_x)"]
    if data.operator == schemas.SearchEventOperator.IS:
        constraints.append("url_path= %(url)s")
    else:
        constraints.append("url_path ILIKE %(url)s")
        args["url"] = helper.values_for_operator(data.url, data.operator)

    query_from = f"{exp_ch_helper.get_main_events_table(data.startTimestamp)} AS main_events"
    # TODO: is this used ?
    # has_click_rage_filter = False
    # if len(data.filters) > 0:
    #     for i, f in enumerate(data.filters):
    #         if f.type == schemas.FilterType.issue and len(f.value) > 0:
    #             has_click_rage_filter = True
    #             query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
    #                              INNER JOIN issues AS mis USING (issue_id)
    #                              INNER JOIN LATERAL (
    #                                  SELECT COUNT(1) AS real_count
    #                                  FROM events.clicks AS sc
    #                                  INNER JOIN sessions as ss USING (session_id)
    #                                  WHERE ss.project_id = 2
    #                                    AND (sc.url = %(url)s OR sc.path = %(url)s)
    #                                    AND sc.timestamp >= %(startDate)s
    #                                    AND sc.timestamp <= %(endDate)s
    #                                    AND ss.start_ts >= %(startDate)s
    #                                    AND ss.start_ts <= %(endDate)s
    #                                    AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
    #             constraints += ["mis.project_id = %(project_id)s",
    #                             "issues.timestamp >= %(startDate)s",
    #                             "issues.timestamp <= %(endDate)s"]
    #             f_k = f"issue_value{i}"
    #             args = {**args, **sh.multi_values(f.value, value_key=f_k)}
    #             constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
    #                                                    f.value, value_key=f_k))
    #             constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
    #                                                    f.value, value_key=f_k))
    # TODO: change this once click-rage is fixed
    # if data.click_rage and not has_click_rage_filter:
    #     constraints.append("""(issues_t.session_id IS NULL
    #                            OR (issues_t.datetime >= toDateTime(%(startDate)s/1000)
    #                                AND issues_t.datetime <= toDateTime(%(endDate)s/1000)
    #                                AND issues_t.project_id = toUInt16(%(project_id)s)
    #                                AND issues_t.event_type = 'ISSUE'
    #                                AND issues_t.project_id = toUInt16(%(project_id)s)
    #                                AND mis.project_id = toUInt16(%(project_id)s)
    #                                AND mis.type='click_rage'))""")
    #     query_from += """ LEFT JOIN experimental.events AS issues_t ON (main_events.session_id=issues_t.session_id)
    #                       LEFT JOIN experimental.issues AS mis ON (issues_t.issue_id=mis.issue_id)"""
    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=f"""SELECT main_events.normalized_x AS normalized_x,
                                            main_events.normalized_y AS normalized_y
                                     FROM {query_from}
                                     WHERE {" AND ".join(constraints)}
                                     LIMIT 500;""",
                           parameters=args)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        try:
            rows = cur.execute(query)
        except Exception as err:
            logger.warning("--------- HEATMAP 2 SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data)
            logger.warning("--------------------")
            raise err

    return helper.list_to_camel_case(rows)


def get_x_y_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatMapPayloadSchema):
    args = {"project_id": project_id, "session_id": session_id, "url": data.url}
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.session_id = %(session_id)s",
                   "main_events.event_type='CLICK'",
                   "isNotNull(main_events.normalized_x)"]
    if data.operator == schemas.SearchEventOperator.IS:
        constraints.append("main_events.url_path = %(url)s")
    else:
        constraints.append("main_events.url_path ILIKE %(url)s")
        args["url"] = helper.values_for_operator(data.url, data.operator)

    query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events"

    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=f"""SELECT main_events.normalized_x AS normalized_x,
                                            main_events.normalized_y AS normalized_y
                                     FROM {query_from}
                                     WHERE {" AND ".join(constraints)};""",
                           parameters=args)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        try:
            rows = cur.execute(query)
        except Exception as err:
            logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data)
            logger.warning("--------------------")
            raise err

    return helper.list_to_camel_case(rows)


def get_selectors_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatMapPayloadSchema):
    args = {"project_id": project_id, "session_id": session_id, "url": data.url}
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.session_id = %(session_id)s",
                   "main_events.event_type='CLICK'"]
    if data.operator == schemas.SearchEventOperator.IS:
        constraints.append("main_events.url_path = %(url)s")
    else:
        constraints.append("main_events.url_path ILIKE %(url)s")
        args["url"] = helper.values_for_operator(data.url, data.operator)

    query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events"

    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=f"""SELECT main_events.selector AS selector,
                                            COUNT(1) AS count
                                     FROM {query_from}
                                     WHERE {" AND ".join(constraints)}
                                     GROUP BY 1
                                     ORDER BY count DESC;""",
                           parameters=args)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        try:
            rows = cur.execute(query)
        except Exception as err:
            logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data)
            logger.warning("--------------------")
            raise err

    return helper.list_to_camel_case(rows)


# use CH
SESSION_PROJECTION_COLS = """s.project_id,
                             s.session_id AS session_id,
                             toUnixTimestamp(s.datetime)*1000 AS start_ts,
                             s.duration AS duration"""


def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
                start_time: int,
                end_time: int) -> str | None:
    full_args = {
        "sessionId": session_id,
        "projectId": project_id,
        "start_time": start_time,
        "end_time": end_time,
    }
    sub_condition = ["session_id = %(sessionId)s", "event_type = 'CLICK'", "project_id = %(projectId)s"]
    if location_condition and len(location_condition.value) > 0:
        f_k = "LOC"
        op = sh.get_sql_operator(location_condition.operator)
        full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)}
        sub_condition.append(
            sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False,
                                value_key=f_k))
    with ch_client.ClickHouseClient() as cur:
        main_query = cur.format(query=f"""WITH paths AS (SELECT DISTINCT url_path
                                                         FROM experimental.events
                                                         WHERE {" AND ".join(sub_condition)})
                                          SELECT url_path, COUNT(1) AS count
                                          FROM experimental.events
                                          INNER JOIN paths USING (url_path)
                                          WHERE event_type = 'CLICK'
                                            AND project_id = %(projectId)s
                                            AND datetime >= toDateTime(%(start_time)s / 1000)
                                            AND datetime <= toDateTime(%(end_time)s / 1000)
                                          GROUP BY url_path
                                          ORDER BY count DESC
                                          LIMIT 1;""",
                                parameters=full_args)
        logger.debug("--------------------")
        logger.debug(main_query)
        logger.debug("--------------------")
        try:
            url = cur.execute(main_query)
        except Exception as err:
            logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION CH-----------")
            logger.warning(main_query.decode('UTF-8'))
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(full_args)
            logger.warning("--------------------")
            raise err

    if url is None or len(url) == 0:
        return None
    return url[0]["url_path"]


def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id,
                         include_mobs: bool = True, exclude_sessions: list[str] = [],
                         _depth: int = 3):
    no_platform = True
    location_condition = None
    no_click = True
    for f in data.filters:
        if f.type == schemas.FilterType.PLATFORM:
            no_platform = False
            break
    for f in data.events:
        if f.type == schemas.EventType.LOCATION:
            if len(f.value) == 0:
                f.operator = schemas.SearchEventOperator.IS_ANY
            location_condition = f.model_copy()
        elif f.type == schemas.EventType.CLICK:
            no_click = False
            if len(f.value) == 0:
                f.operator = schemas.SearchEventOperator.IS_ANY
        if location_condition and not no_click:
            break

    if no_platform:
        data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
                                                              value=[schemas.PlatformType.DESKTOP],
                                                              operator=schemas.SearchEventOperator.IS))
    if not location_condition:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))
    if no_click:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))

    data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
                                                          value=[0],
                                                          operator=schemas.MathOperator.GREATER))

    full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False,
                                                           favorite_only=data.bookmarked, issue=None,
                                                           project_id=project_id, user_id=user_id)
    full_args["exclude_sessions"] = tuple(exclude_sessions)
    if len(exclude_sessions) > 0:
        query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)"
    with ch_client.ClickHouseClient() as cur:
        data.order = schemas.SortOrderType.DESC
        data.sort = 'duration'
        main_query = cur.format(query=f"""SELECT *
                                          FROM (SELECT {SESSION_PROJECTION_COLS}
                                                {query_part}
                                                -- ORDER BY {data.sort} {data.order.value}
                                                LIMIT 20) AS raw
                                          ORDER BY rand()
                                          LIMIT 1;""",
                                parameters=full_args)
        logger.debug("--------------------")
        logger.debug(main_query)
        logger.debug("--------------------")
        try:
            session = cur.execute(main_query)
        except Exception as err:
            logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(main_query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data.model_dump_json())
            logger.warning("--------------------")
            raise err

    if len(session) > 0:
        session = session[0]
        if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY:
            session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"],
                                          location_condition=location_condition,
                                          start_time=data.startTimestamp, end_time=data.endTimestamp)
        else:
            session["path"] = location_condition.value[0]

        if include_mobs:
            session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
            session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
            if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
                return search_short_session(data=data, project_id=project_id, user_id=user_id,
                                            include_mobs=include_mobs,
                                            exclude_sessions=exclude_sessions + [session["session_id"]],
                                            _depth=_depth - 1)
            elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
                logger.info("couldn't find an existing replay after 3 iterations for heatmap")

        session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
                                                     event_type=schemas.EventType.LOCATION)
    else:
        return None

    return helper.dict_to_camel_case(session)


def get_selected_session(project_id, session_id):
    with ch_client.ClickHouseClient() as cur:
        main_query = cur.format(query=f"""SELECT {SESSION_PROJECTION_COLS}
                                          FROM experimental.sessions AS s
                                          WHERE session_id=%(session_id)s;""",
                                parameters={"session_id": session_id})
        logger.debug("--------------------")
        logger.debug(main_query)
        logger.debug("--------------------")
        try:
            session = cur.execute(main_query)
        except Exception as err:
            logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------")
            logger.warning(main_query.decode('UTF-8'))
            raise err
        if len(session) > 0:
            session = session[0]
        else:
            session = None

    if session:
        session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
        session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
        if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
            session["_issue"] = "mob file not found"
            logger.info("can't find selected mob file for heatmap")
        session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id)

    return helper.dict_to_camel_case(session)


def get_page_events(session_id, project_id):
    with ch_client.ClickHouseClient() as cur:
        rows = cur.execute("""\
            SELECT
                message_id,
                toUnixTimestamp(datetime)*1000 AS timestamp,
                url_host AS host,
                url_path AS path,
                url_path AS value,
                url_path AS url,
                'LOCATION' AS type
            FROM experimental.events
            WHERE session_id = %(session_id)s
              AND event_type='LOCATION'
              AND project_id= %(project_id)s
            ORDER BY datetime,message_id;""", {"session_id": session_id, "project_id": project_id})
        rows = helper.list_to_camel_case(rows)
    return rows

api/chalicelib/core/metrics/modules/__init__.py (new file, +12)

import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_METRICS", cast=bool, default=False):
    from chalicelib.core.sessions import sessions_ch as sessions
else:
    from chalicelib.core.sessions import sessions

from chalicelib.core.sessions import sessions_mobs

api/chalicelib/core/metrics/modules/significance/__init__.py (new file, +10)

import logging

from decouple import config

logger = logging.getLogger(__name__)

from .significance import *

if config("EXP_METRICS", cast=bool, default=False):
    from .significance_ch import *

@@ -1,20 +1,15 @@
 import logging
-
-import schemas
-from chalicelib.core import events, metadata
-from chalicelib.utils import sql_helper as sh
-
-"""
-todo: remove LIMIT from the query
-"""
-
-from typing import List
 import math
 import warnings
 from collections import defaultdict
+from typing import List

 from psycopg2.extras import RealDictRow
+
+import schemas
+from chalicelib.core import events, metadata
 from chalicelib.utils import pg_client, helper
+from chalicelib.utils import sql_helper as sh

 logger = logging.getLogger(__name__)
 SIGNIFICANCE_THRSH = 0.4
@@ -1,6 +1,14 @@
 import logging
+from typing import List

+from psycopg2.extras import RealDictRow

+import schemas
+from chalicelib.utils import ch_client
+from chalicelib.utils import exp_ch_helper
 from .significance import *
+from chalicelib.utils import helper
+from chalicelib.utils import sql_helper as sh
+from chalicelib.core import events

 logger = logging.getLogger(__name__)

@@ -202,7 +210,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
     sequences = []
     projections = []
     for i, s in enumerate(n_stages_query):
-        projections.append(f"SUM(T{i + 1}) AS stage{i + 1}")
+        projections.append(f"coalesce(SUM(T{i + 1}),0) AS stage{i + 1}")
         if i == 0:
             sequences.append(f"anyIf(1,{s}) AS T1")
         else:
@@ -220,11 +228,10 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
                          FROM (SELECT {",".join(sequences)}
                                FROM {MAIN_EVENTS_TABLE} AS e {extra_from}
                                WHERE {" AND ".join(constraints)}
-                               GROUP BY {group_by}) AS raw;
-                        """
+                               GROUP BY {group_by}) AS raw;"""

     with ch_client.ClickHouseClient() as cur:
-        query = cur.format(n_stages_query, full_args)
+        query = cur.format(query=n_stages_query, parameters=full_args)
         logger.debug("---------------------------------------------------")
         logger.debug(query)
         logger.debug("---------------------------------------------------")
api/chalicelib/core/metrics/product_analytics_ch.py (new file, +1362)
File diff suppressed because it is too large

@@ -1 +1,2 @@
 TENANT_CONDITION = "TRUE"
+MOB_KEY=""

@@ -1,5 +1,6 @@
 from decouple import config

+import schemas
 from chalicelib.utils.storage import StorageClient


@@ -13,7 +14,7 @@ def __get_devtools_keys(project_id, session_id):
     ]


-def get_urls(session_id, project_id, check_existence: bool = True):
+def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
     results = []
     for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
         if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k):
@@ -4,6 +4,7 @@ from chalicelib.core import events, metadata, events_mobile, \
 from chalicelib.core.sessions import sessions_mobs, sessions_devtool
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
+from chalicelib.core.modules import MOB_KEY


 def __is_mobile_session(platform):
@@ -42,6 +43,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
                    SELECT
                        s.*,
                        s.session_id::text AS session_id,
+                       {MOB_KEY}
                        (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
                        {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
                        {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
@@ -63,7 +65,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
     else:
         data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
         data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
-                                                        check_existence=False)
+                                                        context=context, check_existence=False)
     data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
     if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
         data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
api/chalicelib/core/sourcemaps/__init__.py (new file, empty)

@@ -3,7 +3,7 @@ from urllib.parse import urlparse
 import requests
 from decouple import config

-from chalicelib.core import sourcemaps_parser
+from chalicelib.core.sourcemaps import sourcemaps_parser
 from chalicelib.utils.storage import StorageClient, generators


@@ -35,7 +35,6 @@ if config("CH_COMPRESSION", cast=bool, default=True):
     def transform_result(original_function):
         @wraps(original_function)
         def wrapper(*args, **kwargs):
-            logger.info("Executing query on CH")
             result = original_function(*args, **kwargs)
             if isinstance(result, clickhouse_connect.driver.query.QueryResult):
                 column_names = result.column_names
@@ -72,4 +72,6 @@ STAGE=default-foss
 TZ=UTC
 EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
-EXP_ALERTS=true
+EXP_ALERTS=true
+EXP_ERRORS_SEARCH=true
+EXP_METRICS=true
@@ -8,7 +8,8 @@ from chalicelib.core import sourcemaps, events, projects, alerts, issues, \
     metadata, reset_password, \
     log_tools, sessions, announcements, \
     weekly_report, assist, mobile, tenants, boarding, \
-    notifications, webhook, users, custom_metrics, saved_search, tags, autocomplete
+    notifications, webhook, users, saved_search, tags, autocomplete
+from chalicelib.core.metrics import custom_metrics
 from chalicelib.core.issue_tracking import github, integrations_global, integrations_manager, \
     jira_cloud
 from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \
@@ -8,8 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re

 import schemas
 from chalicelib.core import scope
-from chalicelib.core import errors, errors_viewed, errors_favorite, heatmaps, \
-    assist, signup, feature_flags
+from chalicelib.core import errors, assist, signup, feature_flags
+from chalicelib.core.metrics import heatmaps
+from chalicelib.core.errors import errors_favorite, errors_viewed
 from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
     sessions_assignments, unprocessed_sessions
 from chalicelib.core import tenants, users, projects, license
@@ -1,7 +1,3 @@
-from fastapi import Body
-
-import schemas
-from chalicelib.core import product_analytics
 from routers.base import get_routers

 public_app, app, app_apikey = get_routers()
@@ -1,7 +1,7 @@
 from typing import Union

 import schemas
-from chalicelib.core import dashboards, custom_metrics
+from chalicelib.core.metrics import custom_metrics, dashboards
 from fastapi import Body, Depends
 from or_dependencies import OR_context
 from routers.base import get_routers
@@ -1,8 +1,6 @@
-from typing import Union
-
-import schemas
-from chalicelib.core import product_anaytics2
-from fastapi import Body, Depends
+from chalicelib.core.metrics import product_anaytics2
+from fastapi import Depends
 from or_dependencies import OR_context
 from routers.base import get_routers

@@ -55,8 +55,12 @@ require (
 	github.com/distribution/reference v0.6.0 // indirect
 	github.com/elastic/elastic-transport-go/v8 v8.5.0 // indirect
 	github.com/felixge/httpsnoop v1.0.4 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
 	github.com/go-logr/logr v1.4.1 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
+	github.com/go-playground/locales v0.14.1 // indirect
+	github.com/go-playground/universal-translator v0.18.1 // indirect
+	github.com/go-playground/validator/v10 v10.23.0 // indirect
 	github.com/goccy/go-json v0.10.2 // indirect
 	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
 	github.com/golang/protobuf v1.5.4 // indirect
@@ -70,6 +74,7 @@ require (
 	github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
 	github.com/jackc/puddle v1.3.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
+	github.com/leodido/go-urn v1.4.0 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/paulmach/orb v0.7.1 // indirect
 	github.com/pierrec/lz4/v4 v4.1.15 // indirect
@@ -171,6 +171,8 @@ github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ul
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
 github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
 github.com/getsentry/sentry-go v0.29.0 h1:YtWluuCFg9OfcqnaujpY918N/AhCCwarIDWOYSBAjCA=
 github.com/getsentry/sentry-go v0.29.0/go.mod h1:jhPesDAL0Q0W2+2YEuVOvdWmVtdsr1+jtBrlDEVWwLY=
 github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
@@ -192,6 +194,12 @@ github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZ
 github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
 github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng=
 github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
+github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
 github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
 github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
@@ -364,6 +372,8 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
 github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
backend/pkg/analytics/api/card-handlers.go (new file, +268)

package models

import (
    "encoding/json"
    "fmt"
    "github.com/gorilla/mux"
    "net/http"
    "openreplay/backend/pkg/server/api"
    "openreplay/backend/pkg/server/user"
    "strconv"
    "time"

    "github.com/go-playground/validator/v10"
)

// getCardId returns the ID from the request
func getCardId(r *http.Request) (int64, error) {
    vars := mux.Vars(r)
    idStr := vars["id"]
    if idStr == "" {
        return 0, fmt.Errorf("invalid Card ID")
    }

    id, err := strconv.ParseInt(idStr, 10, 64)
    if err != nil {
        return 0, fmt.Errorf("invalid Card ID")
    }

    return id, nil
}

func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
        return
    }
    bodySize = len(bodyBytes)

    req := &CardCreateRequest{}
    if err := json.Unmarshal(bodyBytes, req); err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    validate := validator.New()
    err = validate.Struct(req)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    // TODO save card to DB

    resp := &CardGetResponse{
        Card: Card{
            CardID:    1,
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
            DeletedAt: nil,
            EditedAt:  nil,
            ProjectID: 1,
            UserID:    1,
            CardBase: CardBase{
                Name:       req.Name,
                IsPublic:   req.IsPublic,
                Thumbnail:  req.Thumbnail,
                MetricType: req.MetricType,
                MetricOf:   req.MetricOf,
                Series:     req.Series,
            },
        },
    }

    currentUser := r.Context().Value("userData").(*user.User)
    e.log.Info(r.Context(), "User ID: ", currentUser.ID)

    e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}

// getCard
func (e *handlersImpl) getCard(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    id, err := getCardId(r)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    thumbnail := "https://example.com/image.png"

    // TODO get card from DB

    resp := &CardGetResponse{
        Card: Card{
            CardID:    id,
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
            DeletedAt: nil,
            EditedAt:  nil,
            ProjectID: 1,
            UserID:    1,
            CardBase: CardBase{
                Name:       "My Card",
                IsPublic:   true,
                Thumbnail:  &thumbnail,
                MetricType: "timeseries",
                MetricOf:   "session_count",
            },
        },
    }

    e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}

// get cards paginated
func (e *handlersImpl) getCards(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    // TODO get cards from DB
    thumbnail := "https://example.com/image.png"

    resp := &GetCardsResponse{
        Cards: []Card{
            {
                CardID:    1,
                CreatedAt: time.Now(),
                UpdatedAt: time.Now(),
                DeletedAt: nil,
                EditedAt:  nil,
                ProjectID: 1,
                UserID:    1,
                CardBase: CardBase{
                    Name:       "My Card",
                    IsPublic:   true,
                    Thumbnail:  &thumbnail,
                    MetricType: "timeseries",
                    MetricOf:   "session_count",
                },
            },
        },
        Total: 10,
    }

    e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}

func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    id, err := getCardId(r)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
        return
    }
    bodySize = len(bodyBytes)

    req := &CardUpdateRequest{}
    if err := json.Unmarshal(bodyBytes, req); err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    validate := validator.New()
    err = validate.Struct(req)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    // TODO update card in DB

    resp := &CardGetResponse{
        Card: Card{
            CardID:    id,
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
            DeletedAt: nil,
            EditedAt:  nil,
            ProjectID: 1,
            UserID:    1,
            CardBase: CardBase{
                Name:       req.Name,
                IsPublic:   req.IsPublic,
                Thumbnail:  req.Thumbnail,
                MetricType: req.MetricType,
                MetricOf:   req.MetricOf,
            },
        },
    }

    e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}

func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    _, err := getCardId(r)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    // TODO delete card from DB

    e.responser.ResponseWithJSON(e.log, r.Context(), w, nil, startTime, r.URL.Path, bodySize)
}

func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request) {
    startTime := time.Now()
    bodySize := 0

    bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
        return
    }
    bodySize = len(bodyBytes)

    req := &GetCardChartDataRequest{}
    if err := json.Unmarshal(bodyBytes, req); err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
        return
    }

    validate := validator.New()
    err = validate.Struct(req)

    // TODO get card chart data from ClickHouse
    jsonInput := `
    {
        "data": [
            {
                "timestamp": 1733934939000,
                "Series A": 100,
                "Series B": 200
            },
            {
                "timestamp": 1733935939000,
                "Series A": 150,
                "Series B": 250
            }
        ]
    }`

    var resp GetCardChartDataResponse
    err = json.Unmarshal([]byte(jsonInput), &resp)
    if err != nil {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
        return
    }

    e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}

backend/pkg/analytics/api/card.go (new file, +92)

package models

import (
    "time"
)

// CardBase Common fields for the Card entity
type CardBase struct {
    Name          string         `json:"name" validate:"required"`
    IsPublic      bool           `json:"isPublic" validate:"omitempty"`
    DefaultConfig map[string]any `json:"defaultConfig"`
    Thumbnail     *string        `json:"thumbnail" validate:"omitempty,url"`
    MetricType    string         `json:"metricType" validate:"required,oneof=timeseries table funnel"`
    MetricOf      string         `json:"metricOf" validate:"required,oneof=session_count user_count"`
    MetricFormat  string         `json:"metricFormat" validate:"required,oneof=default percentage"`
    ViewType      string         `json:"viewType" validate:"required,oneof=line_chart table_view"`
    MetricValue   []string       `json:"metricValue" validate:"omitempty"`
    SessionID     *int64         `json:"sessionId" validate:"omitempty"`
    Series        []CardSeries   `json:"series" validate:"required,dive"`
}

// Card Fields specific to database operations
type Card struct {
    CardBase
    ProjectID int64      `json:"projectId" validate:"required"`
    UserID    int64      `json:"userId" validate:"required"`
    CardID    int64      `json:"cardId"`
    CreatedAt time.Time  `json:"created_at"`
    UpdatedAt time.Time  `json:"updated_at"`
    DeletedAt *time.Time `json:"deleted_at,omitempty"`
    EditedAt  *time.Time `json:"edited_at,omitempty"`
}

type CardSeries struct {
    SeriesID  int64        `json:"seriesId" validate:"omitempty"`
    MetricID  int64        `json:"metricId" validate:"omitempty"`
    Name      string       `json:"name" validate:"required"`
    CreatedAt time.Time    `json:"createdAt" validate:"omitempty"`
    DeletedAt *time.Time   `json:"deletedAt" validate:"omitempty"`
    Index     int64        `json:"index" validate:"required"`
    Filter    SeriesFilter `json:"filter"`
}

type SeriesFilter struct {
    EventOrder string       `json:"eventOrder" validate:"required,oneof=then or and"`
    Filters    []FilterItem `json:"filters"`
}

type FilterItem struct {
    Type           string   `json:"type" validate:"required"`
    Operator       string   `json:"operator" validate:"required"`
    Source         string   `json:"source" validate:"required"`
    SourceOperator string   `json:"sourceOperator" validate:"required"`
    Value          []string `json:"value" validate:"required,dive,required"`
    IsEvent        bool     `json:"isEvent"`
}

// CardCreateRequest Fields required for creating a card (from the frontend)
type CardCreateRequest struct {
    CardBase
}

type CardGetResponse struct {
    Card
}

type CardUpdateRequest struct {
    CardBase
}

type GetCardsResponse struct {
    Cards []Card `json:"cards"`
    Total int64  `json:"total"`
}

type DataPoint struct {
    Timestamp int64            `json:"timestamp"`
    Series    map[string]int64 `json:"series"`
}

type GetCardChartDataRequest struct {
    ProjectID    int64        `json:"projectId" validate:"required"`
    MetricType   string       `json:"metricType" validate:"required,oneof=timeseries table funnel"`
    MetricOf     string       `json:"metricOf" validate:"required,oneof=session_count user_count"`
    MetricFormat string       `json:"metricFormat" validate:"required,oneof=default percentage"`
    SessionID    int64        `json:"sessionId" validate:"required"`
    Series       []CardSeries `json:"series"`
}

type GetCardChartDataResponse struct {
    Data []DataPoint `json:"data"`
}

@@ -1,4 +1,4 @@
-package api
+package models

 import (
     "encoding/json"
@@ -11,7 +11,7 @@ import (
     "time"
 )

-func getId(r *http.Request) (int, error) {
+func getDashboardId(r *http.Request) (int, error) {
     vars := mux.Vars(r)
     idStr := vars["id"]
     if idStr == "" {
@@ -64,7 +64,7 @@ func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) {
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
     //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -90,7 +90,7 @@ func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) {
     startTime := time.Now()
     bodySize := 0

-    id, err := getId(r)
+    id, err := getDashboardId(r)
     if err != nil {
         e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
         return
@@ -113,7 +113,7 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) {
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
     //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -149,7 +149,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) {
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
    //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -163,7 +163,7 @@ func (e *handlersImpl) pinDashboard(w http.ResponseWriter, r *http.Request) {
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
     //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -179,7 +179,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
     //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -195,7 +195,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re
     startTime := time.Now()
     bodySize := 0

-    //id, err := getId(r)
+    //id, err := getDashboardId(r)
     //if err != nil {
     //    e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
     //    return
@@ -1,4 +1,4 @@
-package api
+package models

 import (
     config "openreplay/backend/internal/config/analytics"
@@ -25,6 +25,13 @@ func (e *handlersImpl) GetAll() []*api.Description {
         {"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"},
         {"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"},
         {"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"},
+        {"/v1/analytics/{projectId}/cards", e.createCard, "POST"},
+        {"/v1/analytics/{projectId}/cards", e.getCards, "GET"},
+        {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"},
+        {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"},
+        {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"},
+        {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"},
+        {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"},
     }
 }

@@ -1,4 +1,4 @@
-package api
+package models

 type Dashboard struct {
     DashboardID int `json:"dashboard_id"`
ee/api/.gitignore (vendored, 32 changes)

@@ -188,30 +188,41 @@ Pipfile.lock
 /chalicelib/core/assist.py
 /chalicelib/core/authorizers.py
 /chalicelib/core/autocomplete/*
 /chalicelib/core/boarding.py
 /chalicelib/core/canvas.py
 /chalicelib/core/collaborations/*
 /chalicelib/core/countries.py
-/chalicelib/core/metrics.py
-/chalicelib/core/custom_metrics.py
-/chalicelib/core/custom_metrics_predefined.py
-/chalicelib/core/dashboards.py
-/chalicelib/core/errors_favorite.py
+/chalicelib/core/metrics/metrics.py
+/chalicelib/core/metrics/custom_metrics.py
+/chalicelib/core/metrics/custom_metrics_predefined.py
+/chalicelib/core/metrics/dashboards.py
+/chalicelib/core/metrics/funnels.py
+/chalicelib/core/metrics/heatmaps.py
+/chalicelib/core/metrics/heatmaps_ch.py
+/chalicelib/core/metrics/metrics_ch.py
+/chalicelib/core/metrics/product_analytics.py
+/chalicelib/core/metrics/product_analytics_ch.py
+/chalicelib/core/metrics/product_anaytics2.py
 /chalicelib/core/events.py
 /chalicelib/core/events_mobile.py
 /chalicelib/core/feature_flags.py
-/chalicelib/core/funnels.py
-/chalicelib/core/issue_tracking/*.py
+/chalicelib/core/issue_tracking/*
 /chalicelib/core/issues.py
 /chalicelib/core/jobs.py
-/chalicelib/core/log_tools/*.py
+/chalicelib/core/log_tools/*
 /chalicelib/core/metadata.py
 /chalicelib/core/mobile.py
 /chalicelib/core/saved_search.py
 /chalicelib/core/sessions/sessions.py
 /chalicelib/core/sessions/sessions_ch.py
 /chalicelib/core/sessions/sessions_devtool.py
 /chalicelib/core/sessions/sessions_favorite.py
 /chalicelib/core/sessions/sessions_assignments.py
 /chalicelib/core/sessions/sessions_metas.py
 /chalicelib/core/sessions/sessions_mobs.py
 /chalicelib/core/sessions/sessions_replay.py
 /chalicelib/core/sessions/performance_event.py
 /chalicelib/core/sessions/sessions_viewed.py
 /chalicelib/core/sessions/unprocessed_sessions.py
-/chalicelib/core/significance.py
 /chalicelib/core/socket_ios.py
@@ -276,3 +287,8 @@ Pipfile.lock
 /chalicelib/core/alerts/alerts_processor_ch.py
 /chalicelib/core/alerts/alerts_listener.py
 /chalicelib/core/alerts/modules/helpers.py
+/chalicelib/core/errors/modules/*
+/chalicelib/core/errors/errors.py
+/chalicelib/core/errors/errors_ch.py
+/chalicelib/core/errors/errors_favorite.py
+/chalicelib/core/errors/errors_viewed.py
@@ -3,31 +3,9 @@ import logging
 from decouple import config

 logger = logging.getLogger(__name__)
 from . import custom_metrics as custom_metrics_legacy
 from . import custom_metrics_ee as custom_metrics
 from . import metrics_ch as metrics
 from . import metrics as metrics_legacy

-if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
-    logger.info(">>> Using experimental autocomplete")
-else:
-    from . import autocomplete as autocomplete
-
-if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
-    logger.info(">>> Using experimental error search")
-    from . import errors as errors_legacy
-    from . import errors_exp as errors
-
-if config("EXP_ERRORS_GET", cast=bool, default=False):
-    logger.info(">>> Using experimental error get")
-else:
-    from . import errors as errors
-
-if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
-    logger.info(">>> Using experimental sessions search for metrics")
-
-if config("EXP_FUNNELS", cast=bool, default=False):
-    logger.info(">>> Using experimental funnels")
-    from . import significance_exp as significance
-else:
-    from . import significance as significance
@ -1,119 +0,0 @@
from chalicelib.utils import pg_client
from chalicelib.core import log_tool_datadog, log_tool_stackdriver, log_tool_sentry

from chalicelib.core import users
from chalicelib.core import projects

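# Each onboarding task below is derived from an EXISTS check against the tenant's data:
# sessions recorded, user ids/metadata present, more than one member, or a log tool configured.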
def get_state(tenant_id):
    pids = projects.get_projects_ids(tenant_id=tenant_id)
    with pg_client.PostgresClient() as cur:
        recorded = False
        meta = False

        if len(pids) > 0:
            cur.execute(
                cur.mogrify("""SELECT EXISTS(( SELECT 1
                                               FROM public.sessions AS s
                                               WHERE s.project_id IN %(ids)s)) AS exists;""",
                            {"ids": tuple(pids)})
            )
            recorded = cur.fetchone()["exists"]
            meta = False
            if recorded:
                cur.execute(
                    cur.mogrify("""SELECT EXISTS((SELECT 1
                                      FROM public.projects AS p
                                               LEFT JOIN LATERAL ( SELECT 1
                                                                   FROM public.sessions
                                                                   WHERE sessions.project_id = p.project_id
                                                                     AND sessions.user_id IS NOT NULL
                                                                   LIMIT 1) AS sessions(user_id) ON (TRUE)
                                      WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL
                                        AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
                                            OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
                                            OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
                                            OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
                                            OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
                                            OR p.metadata_10 IS NOT NULL )
                                     )) AS exists;"""
                                , {"tenant_id": tenant_id}))

                meta = cur.fetchone()["exists"]

    return [
        {"task": "Install OpenReplay",
         "done": recorded,
         "URL": "https://docs.openreplay.com/getting-started/quick-start"},
        {"task": "Identify Users",
         "done": meta,
         "URL": "https://docs.openreplay.com/data-privacy-security/metadata"},
        {"task": "Invite Team Members",
         "done": len(users.get_members(tenant_id=tenant_id)) > 1,
         "URL": "https://app.openreplay.com/client/manage-users"},
        {"task": "Integrations",
         "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
                 or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
                 or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
         "URL": "https://docs.openreplay.com/integrations"}
    ]


def get_state_installing(tenant_id):
    pids = projects.get_projects_ids(tenant_id=tenant_id)
    with pg_client.PostgresClient() as cur:
        recorded = False

        if len(pids) > 0:
            cur.execute(
                cur.mogrify("""SELECT EXISTS(( SELECT 1
                                               FROM public.sessions AS s
                                               WHERE s.project_id IN %(ids)s)) AS exists;""",
                            {"ids": tuple(pids)})
            )
            recorded = cur.fetchone()["exists"]

    return {"task": "Install OpenReplay",
            "done": recorded,
            "URL": "https://docs.openreplay.com/getting-started/quick-start"}


def get_state_identify_users(tenant_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""SELECT EXISTS((SELECT 1
                              FROM public.projects AS p
                                       LEFT JOIN LATERAL ( SELECT 1
                                                           FROM public.sessions
                                                           WHERE sessions.project_id = p.project_id
                                                             AND sessions.user_id IS NOT NULL
                                                           LIMIT 1) AS sessions(user_id) ON (TRUE)
                              WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL
                                AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
                                    OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
                                    OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
                                    OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
                                    OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
                                    OR p.metadata_10 IS NOT NULL )
                             )) AS exists;"""
                        , {"tenant_id": tenant_id}))

        meta = cur.fetchone()["exists"]

    return {"task": "Identify Users",
            "done": meta,
            "URL": "https://docs.openreplay.com/data-privacy-security/metadata"}


def get_state_manage_users(tenant_id):
    return {"task": "Invite Team Members",
            "done": len(users.get_members(tenant_id=tenant_id)) > 1,
            "URL": "https://app.openreplay.com/client/manage-users"}


def get_state_integrations(tenant_id):
    return {"task": "Integrations",
            "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
                    or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
                    or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
            "URL": "https://docs.openreplay.com/integrations"}
@ -1,236 +0,0 @@
import json
import logging

from decouple import config
from fastapi import HTTPException, status
from .custom_metrics import *
import schemas
from chalicelib.core import funnels, issues, heatmaps, sessions_mobs, sessions_favorite, \
    product_analytics, custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import extra

# TODO: fix this import
from . import errors as errors
# if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
#     logging.info(">>> Using experimental error search")
#     from . import errors_exp as errors
# else:
#     from . import errors as errors

if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
    from chalicelib.core import sessions
else:
    from chalicelib.core import sessions_legacy as sessions

logger = logging.getLogger(__name__)


# TODO: refactor this to split
#  timeseries /
#  table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
#  remove "table of" calls from this function
def __try_live(project_id, data: schemas.CardSchema):
    results = []
    for i, s in enumerate(data.series):
        results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                               view_type=data.view_type, metric_type=data.metric_type,
                                               metric_of=data.metric_of, metric_value=data.metric_value))

    return results


def __get_table_of_series(project_id, data: schemas.CardSchema):
    results = []
    for i, s in enumerate(data.series):
        results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density,
                                              metric_of=data.metric_of, metric_value=data.metric_value,
                                              metric_format=data.metric_format))

    return results


def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
    if len(data.series) == 0:
        return {
            "total": 0,
            "errors": []
        }
    return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id)


def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
    if len(data.series) == 0:
        logger.debug("empty series")
        return {
            "total": 0,
            "sessions": []
        }
    return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id)


def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
    # No need for this because UI is sending the full payload
    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    # if card is None:
    #     return None
    # metric: schemas.CardSchema = schemas.CardSchema(**card)
    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
    if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
        return None
    results = []
    for s in data.series:
        results.append({"seriesId": s.series_id, "seriesName": s.name,
                        **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})

    return results


def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
    results = []
    if len(data.series) == 0:
        return results
    for s in data.series:
        if len(data.filters) > 0:
            s.filter.filters += data.filters
        s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))

        results.append({"seriesId": None, "seriesName": s.name,
                        **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})

    return results
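# create_card persists the card in `metrics`; when the card has series, the INSERT is wrapped
# in a CTE so the related `metric_series` rows are written in the same statement.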
def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False):
    with pg_client.PostgresClient() as cur:
        session_data = None
        if data.metric_type == schemas.MetricType.HEAT_MAP:
            if data.session_id is not None:
                session_data = {"sessionId": data.session_id}
            else:
                session_data = __get_heat_map_chart(project=project, user_id=user_id,
                                                    data=data, include_mobs=False)
                if session_data is not None:
                    session_data = {"sessionId": session_data["sessionId"]}

            if session_data is not None:
                # for EE only
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=session_data["sessionId"])  # To support old sessions
                tag = config('RETENTION_L_VALUE', default='vault')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))

        _data = {"session_data": json.dumps(session_data) if session_data is not None else None}
        for i, s in enumerate(data.series):
            for k in s.model_dump().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
                  "default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
        if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
            params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))

        query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                        view_type, metric_type, metric_of, metric_value,
                                        metric_format, default_config, thumbnail, data,
                                        card_info)
                   VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                           %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
                           %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
                           %(card_info)s)
                   RETURNING metric_id"""
        if len(data.series) > 0:
            query = f"""WITH m AS ({query})
                        INSERT INTO metric_series(metric_id, index, name, filter)
                        VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
                                          for i in range(series_len)])}
                        RETURNING metric_id;"""

        query = cur.mogrify(query, params)
        cur.execute(query)
        r = cur.fetchone()
        if dashboard:
            return r["metric_id"]
    return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)}


def delete_card(project_id, metric_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.metrics
                SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND metric_id = %(metric_id)s
                  AND (user_id = %(user_id)s OR is_public)
                RETURNING data;""",
                        {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
        )
        # for EE only
        row = cur.fetchone()
        if row:
            if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]):
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=row["data"]["sessionId"])  # To support old sessions
                tag = config('RETENTION_D_VALUE', default='default')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))
    return {"state": "success"}


def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
                                 data: schemas.CardSessionsSchema
                                 # , range_value=None, start_date=None, end_date=None
                                 ):
    # No need for this because UI is sending the full payload
    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    # if card is None:
    #     return None
    # metric: schemas.CardSchema = schemas.CardSchema(**card)
    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
    # if metric is None:
    #     return None
    if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
        return None
    for s in data.series:
        s.filter.startTimestamp = data.startTimestamp
        s.filter.endTimestamp = data.endTimestamp
        s.filter.limit = data.limit
        s.filter.page = data.page
        issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
        issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
        issue = None
        for i in issues_list:
            if i.get("issueId", "") == issue_id:
                issue = i
                break
        if issue is None:
            issue = issues.get(project_id=project_id, issue_id=issue_id)
            if issue is not None:
                issue = {**issue,
                         "affectedSessions": 0,
                         "affectedUsers": 0,
                         "conversionImpact": 0,
                         "lostConversions": 0,
                         "unaffectedSessions": 0}
        return {"seriesId": s.series_id, "seriesName": s.name,
                "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
                                                     issue=issue, data=s.filter)
                if issue is not None else {"total": 0, "sessions": []},
                "issue": issue}
@ -1,609 +0,0 @@
import json

from decouple import config

import schemas
from chalicelib.core import sourcemaps
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from chalicelib.core import sessions_legacy as sessions
else:
    from chalicelib.core import sessions


def get(error_id, family=False):
    if family:
        return get_batch([error_id])
    with pg_client.PostgresClient() as cur:
        # trying: return only 1 error, without event details
        query = cur.mogrify(
            # "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
            "SELECT * FROM public.errors WHERE error_id = %(error_id)s LIMIT 1;",
            {"error_id": error_id})
        cur.execute(query=query)
        result = cur.fetchone()
        if result is not None:
            result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
        return helper.dict_to_camel_case(result)
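# get_batch walks the parent/child links between errors with a recursive CTE,
# returning the whole "error family" for the given ids.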
def get_batch(error_ids):
    if len(error_ids) == 0:
        return []
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """
            WITH RECURSIVE error_family AS (
                SELECT *
                FROM public.errors
                WHERE error_id IN %(error_ids)s
                UNION
                SELECT child_errors.*
                FROM public.errors AS child_errors
                         INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id
            )
            SELECT *
            FROM error_family;""",
            {"error_ids": tuple(error_ids)})
        cur.execute(query=query)
        errors = cur.fetchall()
        for e in errors:
            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
        return helper.list_to_camel_case(errors)


def __flatten_sort_key_count_version(data, merge_nested=False):
    if data is None:
        return []
    return sorted(
        [
            {
                "name": f'{o["name"]}@{v["version"]}',
                "count": v["count"]
            } for o in data for v in o["partition"]
        ],
        key=lambda o: o["count"], reverse=True) if merge_nested else \
        [
            {
                "name": o["name"],
                "count": o["count"],
            } for o in data
        ]


def __process_tags(row):
    return [
        {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
        {"name": "browser.ver",
         "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
        {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
        {"name": "OS.ver",
         "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
        {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
        {"name": "device",
         "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
        {"name": "country", "partitions": row.pop("country_partition")}
    ]


def get_details(project_id, error_id, user_id, **data):
    pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
    pg_sub_query24.append("error_id = %(error_id)s")
    pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
                                                     startTime_arg_name="startDate30",
                                                     endTime_arg_name="endDate30", project_key="sessions.project_id")
    pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
    pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
    pg_sub_query30_session.append("error_id = %(error_id)s")
    pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
                                                 endTime_arg_name="endDate30", project_key="errors.project_id")
    pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
    pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
    pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")
    pg_sub_query30_err.append("error_id = %(error_id)s")
    pg_sub_query30_err.append("source ='js_exception'")
    pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
    pg_sub_query30.append("error_id = %(error_id)s")
    pg_basic_query = __get_basic_constraints(time_constraint=False)
    pg_basic_query.append("error_id = %(error_id)s")
    with pg_client.PostgresClient() as cur:
        data["startDate24"] = TimeUTC.now(-1)
        data["endDate24"] = TimeUTC.now()
        data["startDate30"] = TimeUTC.now(-30)
        data["endDate30"] = TimeUTC.now()
        density24 = int(data.get("density24", 24))
        step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
        density30 = int(data.get("density30", 30))
        step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
        params = {
            "startDate24": data['startDate24'],
            "endDate24": data['endDate24'],
            "startDate30": data['startDate30'],
            "endDate30": data['endDate30'],
            "project_id": project_id,
            "userId": user_id,
            "step_size24": step_size24,
            "step_size30": step_size30,
            "error_id": error_id}

        main_pg_query = f"""\
            SELECT error_id,
                   name,
                   message,
                   users,
                   sessions,
                   last_occurrence,
                   first_occurrence,
                   last_session_id,
                   browsers_partition,
                   os_partition,
                   device_partition,
                   country_partition,
                   chart24,
                   chart30,
                   custom_tags
            FROM (SELECT error_id,
                         name,
                         message,
                         COUNT(DISTINCT user_id)    AS users,
                         COUNT(DISTINCT session_id) AS sessions
                  FROM public.errors
                           INNER JOIN events.errors AS s_errors USING (error_id)
                           INNER JOIN public.sessions USING (session_id)
                  WHERE {" AND ".join(pg_sub_query30_err)}
                  GROUP BY error_id, name, message) AS details
                     INNER JOIN (SELECT MAX(timestamp) AS last_occurrence,
                                        MIN(timestamp) AS first_occurrence
                                 FROM events.errors
                                 WHERE error_id = %(error_id)s) AS time_details ON (TRUE)
                     INNER JOIN (SELECT session_id AS last_session_id,
                                        coalesce(custom_tags, '[]')::jsonb AS custom_tags
                                 FROM events.errors
                                          LEFT JOIN LATERAL (
                                     SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags
                                     FROM errors_tags
                                     WHERE errors_tags.error_id = %(error_id)s
                                       AND errors_tags.session_id = errors.session_id
                                       AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE)
                                 WHERE error_id = %(error_id)s
                                 ORDER BY errors.timestamp DESC
                                 LIMIT 1) AS last_session_details ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
                                 FROM (SELECT *
                                       FROM (SELECT user_browser AS name,
                                                    COUNT(session_id) AS count
                                             FROM events.errors
                                                      INNER JOIN sessions USING (session_id)
                                             WHERE {" AND ".join(pg_sub_query30_session)}
                                             GROUP BY user_browser
                                             ORDER BY count DESC) AS count_per_browser_query
                                                INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
                                                                    FROM (SELECT user_browser_version AS version,
                                                                                 COUNT(session_id) AS count
                                                                          FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                          WHERE {" AND ".join(pg_sub_query30_session)}
                                                                            AND sessions.user_browser = count_per_browser_query.name
                                                                          GROUP BY user_browser_version
                                                                          ORDER BY count DESC) AS version_details
                                                    ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
                                 FROM (SELECT *
                                       FROM (SELECT user_os AS name,
                                                    COUNT(session_id) AS count
                                             FROM events.errors INNER JOIN public.sessions USING (session_id)
                                             WHERE {" AND ".join(pg_sub_query30_session)}
                                             GROUP BY user_os
                                             ORDER BY count DESC) AS count_per_os_details
                                                INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                                    FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
                                                                          FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                          WHERE {" AND ".join(pg_sub_query30_session)}
                                                                            AND sessions.user_os = count_per_os_details.name
                                                                          GROUP BY user_os_version
                                                                          ORDER BY count DESC) AS count_per_version_details
                                                                    GROUP BY count_per_os_details.name ) AS os_version_details
                                    ON (TRUE)) AS os_details) AS os_details ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
                                 FROM (SELECT *
                                       FROM (SELECT user_device_type AS name,
                                                    COUNT(session_id) AS count
                                             FROM events.errors INNER JOIN public.sessions USING (session_id)
                                             WHERE {" AND ".join(pg_sub_query30_session)}
                                             GROUP BY user_device_type
                                             ORDER BY count DESC) AS count_per_device_details
                                                INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
                                                                    FROM (SELECT CASE
                                                                                     WHEN user_device = '' OR user_device ISNULL
                                                                                         THEN 'unknown'
                                                                                     ELSE user_device END AS version,
                                                                                 COUNT(session_id) AS count
                                                                          FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                          WHERE {" AND ".join(pg_sub_query30_session)}
                                                                            AND sessions.user_device_type = count_per_device_details.name
                                                                          GROUP BY user_device
                                                                          ORDER BY count DESC) AS count_per_device_v_details
                                                                    GROUP BY count_per_device_details.name ) AS device_version_details
                                    ON (TRUE)) AS device_details) AS device_details ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
                                 FROM (SELECT user_country AS name,
                                              COUNT(session_id) AS count
                                       FROM events.errors INNER JOIN public.sessions USING (session_id)
                                       WHERE {" AND ".join(pg_sub_query30_session)}
                                       GROUP BY user_country
                                       ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
                                 FROM (SELECT generated_timestamp AS timestamp,
                                              COUNT(session_id) AS count
                                       FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
                                                LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                   FROM events.errors
                                                                            INNER JOIN public.sessions USING (session_id)
                                                                   WHERE {" AND ".join(pg_sub_query24)}
                                                    ) AS chart_details ON (TRUE)
                                       GROUP BY generated_timestamp
                                       ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
                     INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
                                 FROM (SELECT generated_timestamp AS timestamp,
                                              COUNT(session_id) AS count
                                       FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
                                                LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                   WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
                                           ON (TRUE)
                                       GROUP BY timestamp
                                       ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
            """

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")
        cur.execute(cur.mogrify(main_pg_query, params))
        row = cur.fetchone()
        if row is None:
            return {"errors": ["error not found"]}
        row["tags"] = __process_tags(row)

        query = cur.mogrify(
            f"""SELECT error_id, status, session_id, start_ts,
                       parent_error_id,session_id, user_anonymous_id,
                       user_id, user_uuid, user_browser, user_browser_version,
                       user_os, user_os_version, user_device, payload,
                       FALSE AS favorite,
                       True AS viewed
                FROM public.errors AS pe
                         INNER JOIN events.errors AS ee USING (error_id)
                         INNER JOIN public.sessions USING (session_id)
                WHERE pe.project_id = %(project_id)s
                  AND error_id = %(error_id)s
                ORDER BY start_ts DESC
                LIMIT 1;""",
            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
        cur.execute(query=query)
        status = cur.fetchone()

        if status is not None:
            row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
            row["status"] = status.pop("status")
            row["parent_error_id"] = status.pop("parent_error_id")
            row["favorite"] = status.pop("favorite")
            row["viewed"] = status.pop("viewed")
            row["last_hydrated_session"] = status
        else:
            row["stack"] = []
            row["last_hydrated_session"] = None
            row["status"] = "untracked"
            row["parent_error_id"] = None
            row["favorite"] = False
            row["viewed"] = False
        return {"data": helper.dict_to_camel_case(row)}
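# Builds the shared WHERE-clause fragments (project, time window, chart bucketing, platform)
# that get_details() and search() assemble into their queries.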
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                            project_key="project_id"):
    if project_key is None:
        ch_sub_query = []
    else:
        ch_sub_query = [f"{project_key} =%(project_id)s"]
    if time_constraint:
        ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
                         f"timestamp < %({endTime_arg_name})s"]
    if chart:
        ch_sub_query += [f"timestamp >= generated_timestamp",
                         f"timestamp < generated_timestamp + %({step_size_name})s"]
    if platform == schemas.PlatformType.MOBILE:
        ch_sub_query.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.DESKTOP:
        ch_sub_query.append("user_device_type = 'desktop'")
    return ch_sub_query


def __get_sort_key(key):
    return {
        schemas.ErrorSort.OCCURRENCE: "max_datetime",
        schemas.ErrorSort.USERS_COUNT: "users",
        schemas.ErrorSort.SESSIONS_COUNT: "sessions"
    }.get(key, 'max_datetime')
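# search() optionally narrows error ids through a sessions search when events/filters are given,
# then fetches counts, pagination and a per-error chart in a single SQL statement.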
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
    empty_response = {
        'total': 0,
        'errors': []
    }

    platform = None
    for f in data.filters:
        if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
            platform = f.value[0]
    pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
    pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                     "pe.project_id=%(project_id)s"]
    # To ignore Script error
    pg_sub_query.append("pe.message!='Script error.'")
    pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
    if platform:
        pg_sub_query_chart += ["start_ts>=%(startDate)s", "start_ts<%(endDate)s", "project_id=%(project_id)s"]
    pg_sub_query_chart.append("errors.error_id =details.error_id")
    statuses = []
    error_ids = None
    if data.startTimestamp is None:
        data.startTimestamp = TimeUTC.now(-30)
    if data.endTimestamp is None:
        data.endTimestamp = TimeUTC.now(1)
    if len(data.events) > 0 or len(data.filters) > 0:
        print("-- searching for sessions before errors")
        statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                            error_status=data.status)
        if len(statuses) == 0:
            return empty_response
        error_ids = [e["errorId"] for e in statuses]
    with pg_client.PostgresClient() as cur:
        step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
        sort = __get_sort_key('datetime')
        if data.sort is not None:
            sort = __get_sort_key(data.sort)
        order = schemas.SortOrderType.DESC
        if data.order is not None:
            order = data.order
        extra_join = ""

        params = {
            "startDate": data.startTimestamp,
            "endDate": data.endTimestamp,
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size}
        if data.status != schemas.ErrorStatus.ALL:
            pg_sub_query.append("status = %(error_status)s")
            params["error_status"] = data.status
        if data.limit is not None and data.page is not None:
            params["errors_offset"] = (data.page - 1) * data.limit
            params["errors_limit"] = data.limit
        else:
            params["errors_offset"] = 0
            params["errors_limit"] = 200

        if error_ids is not None:
            params["error_ids"] = tuple(error_ids)
            pg_sub_query.append("error_id IN %(error_ids)s")
        # if data.bookmarked:
        #     pg_sub_query.append("ufe.user_id = %(userId)s")
        #     extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
        if data.query is not None and len(data.query) > 0:
            pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
            params["error_query"] = helper.values_for_operator(value=data.query,
                                                               op=schemas.SearchEventOperator.CONTAINS)

        main_pg_query = f"""SELECT full_count,
                                   error_id,
                                   name,
                                   message,
                                   users,
                                   sessions,
                                   last_occurrence,
                                   first_occurrence,
                                   chart
                            FROM (SELECT COUNT(details) OVER () AS full_count, details.*
                                  FROM (SELECT error_id,
                                               name,
                                               message,
                                               COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users,
                                               COUNT(DISTINCT session_id) AS sessions,
                                               MAX(timestamp) AS max_datetime,
                                               MIN(timestamp) AS min_datetime
                                        FROM events.errors
                                                 INNER JOIN public.errors AS pe USING (error_id)
                                                 INNER JOIN public.sessions USING (session_id)
                                                 {extra_join}
                                        WHERE {" AND ".join(pg_sub_query)}
                                        GROUP BY error_id, name, message
                                        ORDER BY {sort} {order}) AS details
                                  LIMIT %(errors_limit)s OFFSET %(errors_offset)s
                                 ) AS details
                                     INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
                                                                MIN(timestamp) AS first_occurrence
                                                         FROM events.errors
                                                         WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
                                     INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
                                                         FROM (SELECT generated_timestamp AS timestamp,
                                                                      COUNT(session_id) AS count
                                                               FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                                                        LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                                           FROM events.errors
                                                                                           {"INNER JOIN public.sessions USING(session_id)" if platform else ""}
                                                                                           WHERE {" AND ".join(pg_sub_query_chart)}
                                                                            ) AS sessions ON (TRUE)
                                                               GROUP BY timestamp
                                                               ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")

        cur.execute(cur.mogrify(main_pg_query, params))
        rows = cur.fetchall()
        total = 0 if len(rows) == 0 else rows[0]["full_count"]

        if total == 0:
            rows = []
        else:
            if len(statuses) == 0:
                query = cur.mogrify(
                    """SELECT error_id,
                              COALESCE((SELECT TRUE
                                        FROM public.user_viewed_errors AS ve
                                        WHERE errors.error_id = ve.error_id
                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
                       FROM public.errors
                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                     "user_id": user_id})
                cur.execute(query=query)
                statuses = helper.list_to_camel_case(cur.fetchall())
            statuses = {
                s["errorId"]: s for s in statuses
            }

    for r in rows:
        r.pop("full_count")
        if r["error_id"] in statuses:
            r["viewed"] = statuses[r["error_id"]]["viewed"]
        else:
            r["viewed"] = False

    return {
        'total': total,
        'errors': helper.list_to_camel_case(rows)
    }


def __save_stacktrace(error_id, data):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
               WHERE error_id = %(error_id)s;""",
            {"error_id": error_id, "data": json.dumps(data)})
        cur.execute(query=query)


def get_trace(project_id, error_id):
    error = get(error_id=error_id, family=False)
    if error is None:
        return {"errors": ["error not found"]}
    if error.get("source", "") != "js_exception":
        return {"errors": ["this source of errors doesn't have a sourcemap"]}
    if error.get("payload") is None:
        return {"errors": ["null payload"]}
    if error.get("stacktrace") is not None:
        return {"sourcemapUploaded": True,
                "trace": error.get("stacktrace"),
                "preparsed": True}
    trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
    if all_exists:
        __save_stacktrace(error_id=error_id, data=trace)
    return {"sourcemapUploaded": all_exists,
            "trace": trace,
            "preparsed": False}


def get_sessions(start_date, end_date, project_id, user_id, error_id):
    extra_constraints = ["s.project_id = %(project_id)s",
                         "s.start_ts >= %(startDate)s",
                         "s.start_ts <= %(endDate)s",
                         "e.error_id = %(error_id)s"]
    if start_date is None:
        start_date = TimeUTC.now(-7)
    if end_date is None:
        end_date = TimeUTC.now()

    params = {
        "startDate": start_date,
        "endDate": end_date,
        "project_id": project_id,
        "userId": user_id,
        "error_id": error_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT s.project_id,
                       s.session_id::text AS session_id,
                       s.user_uuid,
                       s.user_id,
                       s.user_agent,
                       s.user_os,
                       s.user_browser,
                       s.user_device,
                       s.user_country,
                       s.start_ts,
                       s.duration,
                       s.events_count,
                       s.pages_count,
                       s.errors_count,
                       s.issue_types,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
                       COALESCE((SELECT TRUE
                                 FROM public.user_viewed_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
                FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
                WHERE {" AND ".join(extra_constraints)}
                ORDER BY s.start_ts DESC;""",
            params)
        cur.execute(query=query)
        sessions_list = []
        total = cur.rowcount
        row = cur.fetchone()
        while row is not None and len(sessions_list) < 100:
            sessions_list.append(row)
            row = cur.fetchone()

    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions_list)
    }


ACTION_STATE = {
    "unsolve": 'unresolved',
    "solve": 'resolved',
    "ignore": 'ignored'
}


def change_state(project_id, user_id, error_id, action):
    errors = get(error_id, family=True)
    print(len(errors))
    status = ACTION_STATE.get(action)
    if errors is None or len(errors) == 0:
        return {"errors": ["error not found"]}
    if errors[0]["status"] == status:
        return {"errors": [f"error is already {status}"]}

    if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
        return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}

    params = {
        "userId": user_id,
        "error_ids": tuple([e["errorId"] for e in errors]),
        "status": status}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET status = %(status)s
               WHERE error_id IN %(error_ids)s
               RETURNING status""",
            params)
        cur.execute(query=query)
        row = cur.fetchone()
    if row is not None:
        for e in errors:
            e["status"] = row["status"]
    return {"data": errors}
14
ee/api/chalicelib/core/errors/__init__.py
Normal file
@ -0,0 +1,14 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental error search")
    from . import errors as errors_legacy
    from . import errors_ch as errors
else:
    from . import errors

from . import errors_viewed_ee as errors_viewed
@ -1,13 +1,14 @@
import logging

from decouple import config

from chalicelib.core.errors.errors_viewed import *
from chalicelib.utils import ch_client, exp_ch_helper

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
_add_viewed_error = add_viewed_error
logger = logging.getLogger(__name__)


def add_viewed_error(project_id, user_id, error_id):
    _add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
    with ch_client.ClickHouseClient() as cur:
        query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_errors_table()}(project_id,user_id, error_id)
                    VALUES (%(project_id)s,%(userId)s,%(error_id)s);"""
@ -1,39 +0,0 @@
from chalicelib.utils import pg_client
from chalicelib.core import errors_viewed_exp


def add_viewed_error(project_id, user_id, error_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
                           VALUES (%(userId)s,%(error_id)s);""",
                        {"userId": user_id, "error_id": error_id})
        )
    errors_viewed_exp.add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)


def viewed_error_exists(user_id, error_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """SELECT
                   errors.error_id AS hydrated,
                   COALESCE((SELECT TRUE
                             FROM public.user_viewed_errors AS ve
                             WHERE ve.error_id = %(error_id)s
                               AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
               FROM public.errors
               WHERE error_id = %(error_id)s""",
            {"userId": user_id, "error_id": error_id})
        cur.execute(
            query=query
        )
        r = cur.fetchone()
        if r:
            return r.get("viewed")
    return True


def viewed_error(project_id, user_id, error_id):
    if viewed_error_exists(user_id=user_id, error_id=error_id):
        return None
    return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
@ -1,223 +0,0 @@
from typing import Optional

from decouple import config

import schemas
from chalicelib.core import issues
from chalicelib.core.sessions import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event

if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
    from . import autocomplete_exp as autocomplete
else:
    from . import autocomplete as autocomplete


def get_customs_by_session_id(session_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
                SELECT
                    c.*,
                    'CUSTOM' AS type
                FROM events_common.customs AS c
                WHERE
                    c.session_id = %(session_id)s
                ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows = cur.fetchall()
        return helper.dict_to_camel_case(rows)


def __merge_cells(rows, start, count, replacement):
    rows[start] = replacement
    rows = rows[:start + 1] + rows[start + count:]
    return rows


def __get_grouped_clickrage(rows, session_id, project_id):
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
    if len(click_rage_issues) == 0:
        return rows

    for c in click_rage_issues:
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("Count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            if rows[i]["timestamp"] == c["timestamp"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows


def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
    with pg_client.PostgresClient() as cur:
        rows = []
        if event_type is None or event_type == schemas.EventType.CLICK:
            cur.execute(cur.mogrify("""\
                    SELECT
                        c.*,
                        'CLICK' AS type
                    FROM events.clicks AS c
                    WHERE
                        c.session_id = %(session_id)s
                    ORDER BY c.timestamp;""",
                                    {"project_id": project_id, "session_id": session_id})
                        )
            rows += cur.fetchall()
            if group_clickrage:
                rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
        if event_type is None or event_type == schemas.EventType.INPUT:
            cur.execute(cur.mogrify("""
                    SELECT
                        i.*,
                        'INPUT' AS type
                    FROM events.inputs AS i
                    WHERE
                        i.session_id = %(session_id)s
                    ORDER BY i.timestamp;""",
                                    {"project_id": project_id, "session_id": session_id})
                        )
            rows += cur.fetchall()
        if event_type is None or event_type == schemas.EventType.LOCATION:
            cur.execute(cur.mogrify("""\
                    SELECT
                        l.*,
                        l.path AS value,
                        l.path AS url,
                        'LOCATION' AS type
                    FROM events.pages AS l
                    WHERE
                        l.session_id = %(session_id)s
                    ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
            rows += cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
        return rows


def _search_tags(project_id, value, key=None, source=None):
    with pg_client.PostgresClient() as cur:
        query = f"""
                SELECT public.tags.name,
                       'TAG' AS type
                FROM public.tags
                WHERE public.tags.project_id = %(project_id)s
                ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
                LIMIT 10
                """
        query = cur.mogrify(query, {'project_id': project_id, 'value': value})
        cur.execute(query)
        results = helper.list_to_camel_case(cur.fetchall())
        return results


class EventType:
    CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
    INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
    LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
    CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
    REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
    GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
    STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
    TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
    ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
                  column=None)  # column=None because errors are searched by name or message
    METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
    # MOBILE
    CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
    INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
    VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
    SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
    CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
    REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
    CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
                         column=None)  # column=None because errors are searched by name or message
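# Maps each UI event type to its autocomplete getter and query builder;
# the mobile variants reuse the same generic helpers.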
SUPPORTED_TYPES = {
    EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
                                             query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
    EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
                                             query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
    EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
                                                query=autocomplete.__generic_query(
                                                    typename=EventType.LOCATION.ui_type)),
    EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
                                              query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
    EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
                                               query=autocomplete.__generic_query(
                                                   typename=EventType.REQUEST.ui_type)),
    EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
                                               query=autocomplete.__generic_query(
                                                   typename=EventType.GRAPHQL.ui_type)),
    EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
                                                   query=autocomplete.__generic_query(
                                                       typename=EventType.STATEACTION.ui_type)),
    EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None),
    EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
                                             query=None),
    EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
                                                query=None),
    # IOS
    EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE),
                                                    query=autocomplete.__generic_query(
                                                        typename=EventType.CLICK_MOBILE.ui_type)),
    EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE),
                                                    query=autocomplete.__generic_query(
                                                        typename=EventType.INPUT_MOBILE.ui_type)),
    EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE),
                                                   query=autocomplete.__generic_query(
                                                       typename=EventType.VIEW_MOBILE.ui_type)),
    EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE),
                                                     query=autocomplete.__generic_query(
                                                         typename=EventType.CUSTOM_MOBILE.ui_type)),
    EventType.REQUEST_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE),
                                                      query=autocomplete.__generic_query(
                                                          typename=EventType.REQUEST_MOBILE.ui_type)),
    EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile,
                                                    query=None),
}


def get_errors_by_session_id(session_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
                SELECT er.*, ur.*, er.timestamp - s.start_ts AS time
                FROM {EventType.ERROR.table} AS er
                         INNER JOIN public.errors AS ur USING (error_id)
                         INNER JOIN public.sessions AS s USING (session_id)
                WHERE er.session_id = %(session_id)s AND s.project_id = %(project_id)s
                ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
        errors = cur.fetchall()
        for e in errors:
            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
        return helper.list_to_camel_case(errors)


def search(text, event_type, project_id, source, key):
    if not event_type:
        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}

    if event_type in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
        # for MOBILE events autocomplete
        # if event_type + "_MOBILE" in SUPPORTED_TYPES.keys():
        #     rows += SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
    elif event_type + "_MOBILE" in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
    elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.endswith("_MOBILE") \
            and event_type[:-len("_MOBILE")] in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    else:
        return {"errors": ["unsupported event"]}

    return {"data": rows}
@ -1,571 +0,0 @@
import logging

from decouple import config

import schemas
from chalicelib.core import sessions_mobs, events
from chalicelib.utils import sql_helper as sh

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from chalicelib.core import sessions_ch as sessions
else:
    from chalicelib.core import sessions

from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper

logger = logging.getLogger(__name__)
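# Fetches normalized click coordinates from ClickHouse for a URL within the requested
# time range, capped at 500 points per heatmap.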
def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
    if data.url is None or data.url == "":
        return []
    args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
            "project_id": project_id, "url": data.url}
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.datetime >= toDateTime(%(startDate)s/1000)",
                   "main_events.datetime <= toDateTime(%(endDate)s/1000)",
                   "main_events.event_type='CLICK'",
                   "isNotNull(main_events.normalized_x)"]
    if data.operator == schemas.SearchEventOperator.IS:
        constraints.append("url_path= %(url)s")
    else:
        constraints.append("url_path ILIKE %(url)s")
        args["url"] = helper.values_for_operator(data.url, data.operator)

    query_from = f"{exp_ch_helper.get_main_events_table(data.startTimestamp)} AS main_events"
    # TODO: is this used ?
    # has_click_rage_filter = False
    # if len(data.filters) > 0:
    #     for i, f in enumerate(data.filters):
    #         if f.type == schemas.FilterType.issue and len(f.value) > 0:
    #             has_click_rage_filter = True
    #             query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
    #                              INNER JOIN issues AS mis USING (issue_id)
    #                              INNER JOIN LATERAL (
    #                                  SELECT COUNT(1) AS real_count
    #                                  FROM events.clicks AS sc
    #                                           INNER JOIN sessions as ss USING (session_id)
    #                                  WHERE ss.project_id = 2
    #                                    AND (sc.url = %(url)s OR sc.path = %(url)s)
    #                                    AND sc.timestamp >= %(startDate)s
    #                                    AND sc.timestamp <= %(endDate)s
    #                                    AND ss.start_ts >= %(startDate)s
    #                                    AND ss.start_ts <= %(endDate)s
    #                                    AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
    #             constraints += ["mis.project_id = %(project_id)s",
    #                             "issues.timestamp >= %(startDate)s",
    #                             "issues.timestamp <= %(endDate)s"]
    #             f_k = f"issue_value{i}"
    #             args = {**args, **sh.multi_values(f.value, value_key=f_k)}
    #             constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
    #                                                    f.value, value_key=f_k))
    #             constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
    #                                                    f.value, value_key=f_k))
    # TODO: change this once click-rage is fixed
    # if data.click_rage and not has_click_rage_filter:
    #     constraints.append("""(issues_t.session_id IS NULL
    #                            OR (issues_t.datetime >= toDateTime(%(startDate)s/1000)
    #                                AND issues_t.datetime <= toDateTime(%(endDate)s/1000)
    #                                AND issues_t.project_id = toUInt16(%(project_id)s)
    #                                AND issues_t.event_type = 'ISSUE'
    #                                AND issues_t.project_id = toUInt16(%(project_id)s)
    #                                AND mis.project_id = toUInt16(%(project_id)s)
    #                                AND mis.type='click_rage'))""")
    #     query_from += """ LEFT JOIN experimental.events AS issues_t ON (main_events.session_id=issues_t.session_id)
    #                       LEFT JOIN experimental.issues AS mis ON (issues_t.issue_id=mis.issue_id)"""
    with ch_client.ClickHouseClient() as cur:
        query = cur.format(f"""SELECT main_events.normalized_x AS normalized_x,
                                      main_events.normalized_y AS normalized_y
                               FROM {query_from}
                               WHERE {" AND ".join(constraints)}
                               LIMIT 500;""", args)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        try:
            rows = cur.execute(query)
        except Exception as err:
            logger.warning("--------- HEATMAP 2 SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data)
            logger.warning("--------------------")
            raise err

    return helper.list_to_camel_case(rows)


def get_x_y_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatMapPayloadSchema):
    args = {"project_id": project_id, "session_id": session_id, "url": data.url}
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.session_id = %(session_id)s",
                   "main_events.event_type='CLICK'",
                   "isNotNull(main_events.normalized_x)"]
    if data.operator == schemas.SearchEventOperator.IS:
        constraints.append("main_events.url_path = %(url)s")
    else:
        constraints.append("main_events.url_path ILIKE %(url)s")
        args["url"] = helper.values_for_operator(data.url, data.operator)

    query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events"

    with ch_client.ClickHouseClient() as cur:
        query = cur.format(f"""SELECT main_events.normalized_x AS normalized_x,
                                      main_events.normalized_y AS normalized_y
                               FROM {query_from}
                               WHERE {" AND ".join(constraints)};""", args)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        try:
            rows = cur.execute(query)
        except Exception as err:
            logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION CH -----------")
            logger.warning(query)
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data)
            logger.warning("--------------------")
            raise err

    return helper.list_to_camel_case(rows)


def get_selectors_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatMapPayloadSchema):
    args = {"project_id": project_id, "session_id": session_id, "url": data.url}
|
||||
constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
|
||||
"main_events.session_id = %(session_id)s",
|
||||
"main_events.event_type='CLICK'"]
|
||||
if data.operator == schemas.SearchEventOperator.IS:
|
||||
constraints.append("main_events.url_path = %(url)s")
|
||||
else:
|
||||
constraints.append("main_events.url_path ILIKE %(url)s")
|
||||
args["url"] = helper.values_for_operator(data.url, data.operator)
|
||||
|
||||
query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events"
|
||||
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
query = cur.format(f"""SELECT main_events.selector AS selector,
|
||||
COUNT(1) AS count
|
||||
FROM {query_from}
|
||||
WHERE {" AND ".join(constraints)}
|
||||
GROUP BY 1
|
||||
ORDER BY count DESC;""", args)
|
||||
logger.debug("---------")
|
||||
logger.debug(query)
|
||||
logger.debug("---------")
|
||||
try:
|
||||
rows = cur.execute(query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION CH -----------")
|
||||
logger.warning(query)
|
||||
logger.warning("--------- PAYLOAD -----------")
|
||||
logger.warning(data)
|
||||
logger.warning("--------------------")
|
||||
raise err
|
||||
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
# this part is identical to FOSS
|
||||
SESSION_PROJECTION_COLS = """s.project_id,
|
||||
s.session_id::text AS session_id,
|
||||
s.start_ts,
|
||||
s.duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
"sessionId": session_id,
|
||||
"projectId": project_id,
|
||||
"start_time": start_time,
|
||||
"end_time": end_time,
|
||||
}
|
||||
sub_condition = ["session_id = %(sessionId)s"]
|
||||
if location_condition and len(location_condition.value) > 0:
|
||||
f_k = "LOC"
|
||||
op = sh.get_sql_operator(location_condition.operator)
|
||||
full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)}
|
||||
sub_condition.append(
|
||||
sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False,
|
||||
value_key=f_k))
|
||||
with pg_client.PostgresClient() as cur:
|
||||
main_query = cur.mogrify(f"""WITH paths AS (SELECT DISTINCT path
|
||||
FROM events.clicks
|
||||
WHERE {" AND ".join(sub_condition)})
|
||||
SELECT path, COUNT(1) AS count
|
||||
FROM events.clicks
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
INNER JOIN paths USING (path)
|
||||
WHERE sessions.project_id = %(projectId)s
|
||||
AND clicks.timestamp >= %(start_time)s
|
||||
AND clicks.timestamp <= %(end_time)s
|
||||
AND start_ts >= %(start_time)s
|
||||
AND start_ts <= %(end_time)s
|
||||
AND duration IS NOT NULL
|
||||
GROUP BY path
|
||||
ORDER BY count DESC
|
||||
LIMIT 1;""", full_args)
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION -----------")
|
||||
logger.warning(main_query.decode('UTF-8'))
|
||||
logger.warning("--------- PAYLOAD -----------")
|
||||
logger.warning(full_args)
|
||||
logger.warning("--------------------")
|
||||
raise err
|
||||
|
||||
url = cur.fetchone()
|
||||
if url is None:
|
||||
return None
|
||||
return url["path"]
|
||||
|
||||
|
||||
def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id,
|
||||
include_mobs: bool = True, exclude_sessions: list[str] = [],
|
||||
_depth: int = 3):
|
||||
no_platform = True
|
||||
location_condition = None
|
||||
no_click = True
|
||||
for f in data.filters:
|
||||
if f.type == schemas.FilterType.PLATFORM:
|
||||
no_platform = False
|
||||
break
|
||||
for f in data.events:
|
||||
if f.type == schemas.EventType.LOCATION:
|
||||
if len(f.value) == 0:
|
||||
f.operator = schemas.SearchEventOperator.IS_ANY
|
||||
location_condition = f.model_copy()
|
||||
elif f.type == schemas.EventType.CLICK:
|
||||
no_click = False
|
||||
if len(f.value) == 0:
|
||||
f.operator = schemas.SearchEventOperator.IS_ANY
|
||||
if location_condition and not no_click:
|
||||
break
|
||||
|
||||
if no_platform:
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
|
||||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
|
||||
operator=schemas.MathOperator.GREATER))
|
||||
|
||||
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
|
||||
favorite_only=data.bookmarked, issue=None,
|
||||
project_id=project_id, user_id=user_id)
|
||||
full_args["exclude_sessions"] = tuple(exclude_sessions)
|
||||
if len(exclude_sessions) > 0:
|
||||
query_part += "\n AND session_id NOT IN %(exclude_sessions)s"
|
||||
with pg_client.PostgresClient() as cur:
|
||||
data.order = schemas.SortOrderType.DESC
|
||||
data.sort = 'duration'
|
||||
main_query = cur.mogrify(f"""SELECT *
|
||||
FROM (SELECT {SESSION_PROJECTION_COLS}
|
||||
{query_part}
|
||||
--ignoring the sort made the query faster (from 6s to 100ms)
|
||||
--ORDER BY {data.sort} {data.order.value}
|
||||
LIMIT 20) AS raw
|
||||
ORDER BY random()
|
||||
LIMIT 1;""", full_args)
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
|
||||
logger.warning(main_query.decode('UTF-8'))
|
||||
logger.warning("--------- PAYLOAD -----------")
|
||||
logger.warning(data.model_dump_json())
|
||||
logger.warning("--------------------")
|
||||
raise err
|
||||
|
||||
session = cur.fetchone()
|
||||
if session:
|
||||
if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY:
|
||||
session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"],
|
||||
location_condition=location_condition,
|
||||
start_time=data.startTimestamp, end_time=data.endTimestamp)
|
||||
else:
|
||||
session["path"] = location_condition.value[0]
|
||||
|
||||
if include_mobs:
|
||||
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
|
||||
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
|
||||
if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
|
||||
return search_short_session(data=data, project_id=project_id, user_id=user_id,
|
||||
include_mobs=include_mobs,
|
||||
exclude_sessions=exclude_sessions + [session["session_id"]],
|
||||
_depth=_depth - 1)
|
||||
elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
|
||||
logger.info("couldn't find an existing replay after 3 iterations for heatmap")
|
||||
|
||||
session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id)
|
||||
else:
|
||||
logger.debug("No session found for heatmap")
|
||||
|
||||
return helper.dict_to_camel_case(session)
|
||||
|
||||
|
||||
def get_selected_session(project_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
|
||||
FROM public.sessions AS s
|
||||
WHERE session_id=%(session_id)s;""", {"session_id": session_id})
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------")
|
||||
logger.warning(main_query.decode('UTF-8'))
|
||||
raise err
|
||||
|
||||
session = cur.fetchone()
|
||||
if session:
|
||||
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
|
||||
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
|
||||
if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
|
||||
session["_issue"] = "mob file not found"
|
||||
logger.info("can't find selected mob file for heatmap")
|
||||
session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id)
|
||||
|
||||
return helper.dict_to_camel_case(session)
|
||||
|
||||
|
||||
def get_page_events(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
message_id,
|
||||
timestamp,
|
||||
host,
|
||||
path,
|
||||
path AS value,
|
||||
path AS url,
|
||||
'LOCATION' AS type
|
||||
FROM events.pages
|
||||
WHERE session_id = %(session_id)s
|
||||
ORDER BY timestamp,message_id;""", {"session_id": session_id}))
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
return rows
|
||||
|
||||
else:
|
||||
# use CH
|
||||
SESSION_PROJECTION_COLS = """s.project_id,
|
||||
s.session_id AS session_id,
|
||||
toUnixTimestamp(s.datetime)*1000 AS start_ts,
|
||||
s.duration AS duration"""
|
||||
|
||||
|
||||
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
|
||||
start_time: int,
|
||||
end_time: int) -> str | None:
|
||||
full_args = {
|
||||
"sessionId": session_id,
|
||||
"projectId": project_id,
|
||||
"start_time": start_time,
|
||||
"end_time": end_time,
|
||||
}
|
||||
sub_condition = ["session_id = %(sessionId)s", "event_type = 'CLICK'", "project_id = %(projectId)s"]
|
||||
if location_condition and len(location_condition.value) > 0:
|
||||
f_k = "LOC"
|
||||
op = sh.get_sql_operator(location_condition.operator)
|
||||
full_args = {**full_args, **sh.multi_values(location_condition.value, value_key=f_k)}
|
||||
sub_condition.append(
|
||||
sh.multi_conditions(f'path {op} %({f_k})s', location_condition.value, is_not=False,
|
||||
value_key=f_k))
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
main_query = cur.format(f"""WITH paths AS (SELECT DISTINCT url_path
|
||||
FROM experimental.events
|
||||
WHERE {" AND ".join(sub_condition)})
|
||||
SELECT url_path, COUNT(1) AS count
|
||||
FROM experimental.events
|
||||
INNER JOIN paths USING (url_path)
|
||||
WHERE event_type = 'CLICK'
|
||||
AND project_id = %(projectId)s
|
||||
AND datetime >= toDateTime(%(start_time)s / 1000)
|
||||
AND datetime <= toDateTime(%(end_time)s / 1000)
|
||||
GROUP BY url_path
|
||||
ORDER BY count DESC
|
||||
LIMIT 1;""", full_args)
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
url = cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP BEST URL SEARCH QUERY EXCEPTION CH-----------")
|
||||
                logger.warning(main_query)
logger.warning("--------- PAYLOAD -----------")
|
||||
logger.warning(full_args)
|
||||
logger.warning("--------------------")
|
||||
raise err
|
||||
|
||||
if url is None or len(url) == 0:
|
||||
return None
|
||||
return url[0]["url_path"]
|
||||
|
||||
|
||||
def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_id,
|
||||
include_mobs: bool = True, exclude_sessions: list[str] = [],
|
||||
_depth: int = 3):
|
||||
no_platform = True
|
||||
location_condition = None
|
||||
no_click = True
|
||||
for f in data.filters:
|
||||
if f.type == schemas.FilterType.PLATFORM:
|
||||
no_platform = False
|
||||
break
|
||||
for f in data.events:
|
||||
if f.type == schemas.EventType.LOCATION:
|
||||
if len(f.value) == 0:
|
||||
f.operator = schemas.SearchEventOperator.IS_ANY
|
||||
location_condition = f.model_copy()
|
||||
elif f.type == schemas.EventType.CLICK:
|
||||
no_click = False
|
||||
if len(f.value) == 0:
|
||||
f.operator = schemas.SearchEventOperator.IS_ANY
|
||||
if location_condition and not no_click:
|
||||
break
|
||||
|
||||
if no_platform:
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
|
||||
value=[schemas.PlatformType.DESKTOP],
|
||||
operator=schemas.SearchEventOperator.IS))
|
||||
if not location_condition:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
if no_click:
|
||||
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
|
||||
value=[],
|
||||
operator=schemas.SearchEventOperator.IS_ANY))
|
||||
|
||||
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
|
||||
value=[0],
|
||||
operator=schemas.MathOperator.GREATER))
|
||||
|
||||
full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False,
|
||||
favorite_only=data.bookmarked, issue=None,
|
||||
project_id=project_id, user_id=user_id)
|
||||
full_args["exclude_sessions"] = tuple(exclude_sessions)
|
||||
if len(exclude_sessions) > 0:
|
||||
query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)"
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
data.order = schemas.SortOrderType.DESC
|
||||
data.sort = 'duration'
|
||||
main_query = cur.format(f"""SELECT *
|
||||
FROM (SELECT {SESSION_PROJECTION_COLS}
|
||||
{query_part}
|
||||
-- ORDER BY {data.sort} {data.order.value}
|
||||
LIMIT 20) AS raw
|
||||
ORDER BY rand()
|
||||
LIMIT 1;""", full_args)
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
session = cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION CH -----------")
|
||||
logger.warning(main_query)
|
||||
logger.warning("--------- PAYLOAD -----------")
|
||||
logger.warning(data.model_dump_json())
|
||||
logger.warning("--------------------")
|
||||
raise err
|
||||
|
||||
if len(session) > 0:
|
||||
session = session[0]
|
||||
if not location_condition or location_condition.operator == schemas.SearchEventOperator.IS_ANY:
|
||||
session["path"] = __get_1_url(project_id=project_id, session_id=session["session_id"],
|
||||
location_condition=location_condition,
|
||||
start_time=data.startTimestamp, end_time=data.endTimestamp)
|
||||
else:
|
||||
session["path"] = location_condition.value[0]
|
||||
|
||||
if include_mobs:
|
||||
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
|
||||
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
|
||||
if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
|
||||
return search_short_session(data=data, project_id=project_id, user_id=user_id,
|
||||
include_mobs=include_mobs,
|
||||
exclude_sessions=exclude_sessions + [session["session_id"]],
|
||||
_depth=_depth - 1)
|
||||
elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
|
||||
logger.info("couldn't find an existing replay after 3 iterations for heatmap")
|
||||
|
||||
session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
|
||||
event_type=schemas.EventType.LOCATION)
|
||||
else:
|
||||
return None
|
||||
|
||||
return helper.dict_to_camel_case(session)
|
||||
|
||||
|
||||
def get_selected_session(project_id, session_id):
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
main_query = cur.format(f"""SELECT {SESSION_PROJECTION_COLS}
|
||||
FROM experimental.sessions AS s
|
||||
WHERE session_id=%(session_id)s;""", {"session_id": session_id})
|
||||
logger.debug("--------------------")
|
||||
logger.debug(main_query)
|
||||
logger.debug("--------------------")
|
||||
try:
|
||||
session = cur.execute(main_query)
|
||||
except Exception as err:
|
||||
logger.warning("--------- CLICK MAP GET SELECTED SESSION QUERY EXCEPTION -----------")
|
||||
                logger.warning(main_query)
                raise err
        if len(session) > 0:
            session = session[0]
        else:
            session = None

        if session:
            session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
            session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
            if len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
                session["_issue"] = "mob file not found"
                logger.info("can't find selected mob file for heatmap")
            session['events'] = get_page_events(session_id=session["session_id"], project_id=project_id)

        return helper.dict_to_camel_case(session)


    def get_page_events(session_id, project_id):
        with ch_client.ClickHouseClient() as cur:
            rows = cur.execute("""\
                SELECT
                    message_id,
                    toUnixTimestamp(datetime)*1000 AS timestamp,
                    url_host AS host,
                    url_path AS path,
                    url_path AS value,
                    url_path AS url,
                    'LOCATION' AS type
                FROM experimental.events
                WHERE session_id = %(session_id)s
                  AND event_type='LOCATION'
                  AND project_id= %(project_id)s
                ORDER BY datetime,message_id;""", {"session_id": session_id, "project_id": project_id})
            rows = helper.list_to_camel_case(rows)
        return rows
@ -1,67 +0,0 @@
import schemas
from chalicelib.utils import pg_client


def get_global_integrations_status(tenant_id, user_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                SELECT EXISTS((SELECT 1
                               FROM public.oauth_authentication
                               WHERE user_id = %(user_id)s
                                 AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value},
                       EXISTS((SELECT 1
                               FROM public.jira_cloud
                               WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s
                                 AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
                       EXISTS((SELECT 1
                               FROM public.webhooks
                               WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
                       EXISTS((SELECT 1
                               FROM public.webhooks
                               WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value},
                       EXISTS((SELECT 1
                               FROM public.integrations
                               WHERE project_id=%(project_id)s AND provider='dynatrace')) AS {schemas.IntegrationType.DYNATRACE.value};""",
                        {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
        )
        current_integrations = cur.fetchone()
        result = []
        for k in current_integrations.keys():
            result.append({"name": k, "integrated": current_integrations[k]})
        return result
@ -1 +0,0 @@
TENANT_CONDITION = "tenant_id=%(tenant_id)s"

10 ee/api/chalicelib/core/metrics/__init__.py Normal file
@ -0,0 +1,10 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

from chalicelib.core.metrics import heatmaps_ch as heatmaps
from chalicelib.core.metrics import metrics_ch as metrics
from chalicelib.core.metrics import custom_metrics_ee as custom_metrics
from chalicelib.core.metrics import product_analytics_ch as product_analytics
99 ee/api/chalicelib/core/metrics/custom_metrics_ee.py Normal file
@ -0,0 +1,99 @@
import json
import logging

from decouple import config
from chalicelib.utils.storage import extra
from chalicelib.core.sessions import sessions_mobs, sessions_favorite
from .custom_metrics import *


def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False):
    with pg_client.PostgresClient() as cur:
        session_data = None
        if data.metric_type == schemas.MetricType.HEAT_MAP:
            if data.session_id is not None:
                session_data = {"sessionId": data.session_id}
            else:
                session_data = __get_heat_map_chart(project=project, user_id=user_id,
                                                    data=data, include_mobs=False)
                if session_data is not None:
                    session_data = {"sessionId": session_data["sessionId"]}

            if session_data is not None:
                # for EE only
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=session_data["sessionId"])  # To support old sessions
                tag = config('RETENTION_L_VALUE', default='vault')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))

        _data = {"session_data": json.dumps(session_data) if session_data is not None else None}
        for i, s in enumerate(data.series):
            for k in s.model_dump().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
                  "default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
        if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
            params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))

        query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                        view_type, metric_type, metric_of, metric_value,
                                        metric_format, default_config, thumbnail, data,
                                        card_info)
                   VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                           %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
                           %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
                           %(card_info)s)
                   RETURNING metric_id"""
        if len(data.series) > 0:
            query = f"""WITH m AS ({query})
                        INSERT INTO metric_series(metric_id, index, name, filter)
                        VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
                                          for i in range(series_len)])}
                        RETURNING metric_id;"""

        query = cur.mogrify(query, params)
        cur.execute(query)
        r = cur.fetchone()
        if dashboard:
            return r["metric_id"]
        return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)}


def delete_card(project_id, metric_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.metrics
                SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND metric_id = %(metric_id)s
                  AND (user_id = %(user_id)s OR is_public)
                RETURNING data;""",
                        {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
        )
        # for EE only
        row = cur.fetchone()
        if row:
            if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]):
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=row["data"]["sessionId"])  # To support old sessions
                tag = config('RETENTION_D_VALUE', default='default')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))
    return {"state": "success"}
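Both paths above drive retention through object tags: create_card pins a heat-map card's replay files with RETENTION_L_VALUE ('vault'), and delete_card re-tags them with RETENTION_D_VALUE ('default') once no favorite still references the session, leaving storage lifecycle rules to do the actual expiry. A sketch of that mechanism with boto3 (bucket, key, and tag key are illustrative; extra.tag_session's real implementation isn't shown in this diff):

import boto3


def tag_object(bucket: str, key: str, tag_value: str):
    # lifecycle rules can match on the "retention" tag to keep or expire the object
    s3 = boto3.client("s3")
    s3.put_object_tagging(
        Bucket=bucket,
        Key=key,
        Tagging={"TagSet": [{"Key": "retention", "Value": tag_value}]},
    )


tag_object("sessions-bucket", "12345/dom.mob", "vault")    # pin while a card references it
tag_object("sessions-bucket", "12345/dom.mob", "default")  # release when the card is deleted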
@ -1 +1,2 @@
TENANT_CONDITION = "tenant_id = %(tenant_id)s"
MOB_KEY = "encode(file_key,'hex') AS file_key,"
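TENANT_CONDITION and MOB_KEY are SQL fragments meant to be spliced into query f-strings, with the tenant value still bound through a named parameter rather than string-formatted in. A minimal sketch of the intended usage (the function and its query are illustrative, assuming the chalicelib.core.modules import path):

from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client


def count_projects(tenant_id):
    # TENANT_CONDITION only contributes the SQL text; the actual value is
    # still bound safely through mogrify's %(tenant_id)s placeholder
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT COUNT(1) AS count
                                FROM public.projects AS p
                                WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL;""",
                            {"tenant_id": tenant_id})
        cur.execute(query)
        return cur.fetchone()["count"]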
@ -1,8 +1,8 @@
from typing import List

import schemas
from chalicelib.core.metrics_ch import __get_basic_constraints, __get_meta_constraint
from chalicelib.core.metrics_ch import __get_constraint_values, __complete_missing_steps
from chalicelib.core.metrics.metrics_ch import __get_basic_constraints, __get_meta_constraint, __get_constraint_values, \
    __complete_missing_steps
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import helper, dev
from chalicelib.utils.TimeUTC import TimeUTC
@ -10,3 +10,7 @@ if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from . import sessions_ch as sessions
else:
    from . import sessions

from chalicelib.core.sessions import sessions_devtool_ee as sessions_devtool
from chalicelib.core.sessions import sessions_viewed_ee as sessions_viewed
from chalicelib.core.sessions import sessions_favorite_ee as sessions_favorite
@ -1,39 +0,0 @@
from decouple import config
from fastapi.security import SecurityScopes

import schemas
from chalicelib.core import permissions
from chalicelib.utils.storage import StorageClient

SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])


def __get_devtools_keys(project_id, session_id):
    params = {
        "sessionId": session_id,
        "projectId": project_id
    }
    return [
        config("DEVTOOLS_MOB_PATTERN", default="%(sessionId)sdevtools") % params
    ]


def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
    if not permissions.check(security_scopes=SCOPES, context=context):
        return []
    results = []
    for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
        if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k):
            continue
        results.append(StorageClient.get_presigned_url_for_sharing(
            bucket=config("sessions_bucket"),
            expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
            key=k
        ))
    return results


def delete_mobs(project_id, session_ids):
    for session_id in session_ids:
        for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
            StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)
13 ee/api/chalicelib/core/sessions/sessions_devtool_ee.py Normal file
@ -0,0 +1,13 @@
from fastapi.security import SecurityScopes

from chalicelib.core import permissions
from chalicelib.core.sessions.sessions_devtool import *

_get_urls = get_urls
SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])


def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
    if not permissions.check(security_scopes=SCOPES, context=context):
        return []
    return _get_urls(session_id=session_id, project_id=project_id, context=context, check_existence=check_existence)
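The *_ee modules in this commit share one wrapping idiom: star-import the FOSS implementation, keep the original callable under a private alias, then redefine the public name with the EE-only gate in front. A stripped-down sketch of the idiom (module and argument names are illustrative):

# foss_module.py
def fetch(item_id):
    return {"id": item_id}


# ee_module.py
from foss_module import *  # re-export everything, including fetch()

_fetch = fetch  # keep a handle on the FOSS implementation


def fetch(item_id, allowed: bool = True):
    # the EE build bolts a check on, then delegates unchanged
    if not allowed:
        return []
    return _fetch(item_id)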
@ -1,97 +0,0 @@
import schemas
from chalicelib.core import sessions_favorite_exp, sessions_mobs, sessions_devtool
from chalicelib.utils import pg_client
from chalicelib.utils.storage import extra
from decouple import config


def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                INSERT INTO public.user_favorite_sessions(user_id, session_id)
                VALUES (%(userId)s,%(session_id)s)
                RETURNING session_id;""",
                        {"userId": context.user_id, "session_id": session_id})
        )
        row = cur.fetchone()
        if row:
            sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id,
                                                       session_id=session_id)
            return {"data": {"sessionId": session_id}}
        return {"errors": ["something went wrong"]}


def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                DELETE FROM public.user_favorite_sessions
                WHERE user_id = %(userId)s
                  AND session_id = %(session_id)s
                RETURNING session_id;""",
                        {"userId": context.user_id, "session_id": session_id})
        )
        row = cur.fetchone()
        if row:
            sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id,
                                                          session_id=session_id)
            return {"data": {"sessionId": session_id}}
        return {"errors": ["something went wrong"]}


def favorite_session(context: schemas.CurrentContext, project_id, session_id):
    keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
    keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id)  # To support old sessions
    keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)

    if favorite_session_exists(user_id=context.user_id, session_id=session_id):
        tag = config('RETENTION_D_VALUE', default='default')

        for k in keys:
            try:
                extra.tag_session(file_key=k, tag_value=tag)
            except Exception as e:
                print(f"!!!Error while tagging: {k} to {tag} for removal")
                print(str(e))

        return remove_favorite_session(context=context, project_id=project_id, session_id=session_id)

    tag = config('RETENTION_L_VALUE', default='vault')

    for k in keys:
        try:
            extra.tag_session(file_key=k, tag_value=tag)
        except Exception as e:
            print(f"!!!Error while tagging: {k} to {tag} for vault")
            print(str(e))

    return add_favorite_session(context=context, project_id=project_id, session_id=session_id)


def favorite_session_exists(session_id, user_id=None):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""SELECT session_id
                    FROM public.user_favorite_sessions
                    WHERE session_id = %(session_id)s
                          {'AND user_id = %(userId)s' if user_id else ''};""",
                {"userId": user_id, "session_id": session_id})
        )
        r = cur.fetchone()
        return r is not None


def get_start_end_timestamp(project_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
                   FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
                   WHERE user_favorite_sessions.user_id = %(userId)s
                     AND project_id = %(project_id)s;""",
                {"userId": user_id, "project_id": project_id})
        )
        r = cur.fetchone()
        return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])
75 ee/api/chalicelib/core/sessions/sessions_favorite_ee.py Normal file
@ -0,0 +1,75 @@
import logging

from decouple import config

from chalicelib.utils import ch_client, exp_ch_helper

logger = logging.getLogger(__name__)
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.core.sessions.sessions_favorite import *
from chalicelib.utils.storage import extra

_add_favorite_session = add_favorite_session
_remove_favorite_session = remove_favorite_session


def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
    result = _add_favorite_session(context=context, project_id=project_id, session_id=session_id)
    if "data" in result:
        add_favorite_session_to_ch(project_id=project_id, user_id=context.user_id,
                                   session_id=session_id)
    return result


def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
    result = _remove_favorite_session(context=context, project_id=project_id, session_id=session_id)
    if "data" in result:
        remove_favorite_session_from_ch(project_id=project_id, user_id=context.user_id,
                                        session_id=session_id)
    return result


def favorite_session(context: schemas.CurrentContext, project_id, session_id):
    keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
    keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id)  # To support old sessions
    keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)

    if favorite_session_exists(user_id=context.user_id, session_id=session_id):
        tag = config('RETENTION_D_VALUE', default='default')

        for k in keys:
            try:
                extra.tag_session(file_key=k, tag_value=tag)
            except Exception as e:
                print(f"!!!Error while tagging: {k} to {tag} for removal")
                print(str(e))

        return remove_favorite_session(context=context, project_id=project_id, session_id=session_id)

    tag = config('RETENTION_L_VALUE', default='vault')

    for k in keys:
        try:
            extra.tag_session(file_key=k, tag_value=tag)
        except Exception as e:
            print(f"!!!Error while tagging: {k} to {tag} for vault")
            print(str(e))

    return add_favorite_session(context=context, project_id=project_id, session_id=session_id)


def add_favorite_session_to_ch(project_id, user_id, session_id, sign=1):
    try:
        with ch_client.ClickHouseClient() as cur:
            query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
                        VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
            params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
            cur.execute(query=query, params=params)

    except Exception as err:
        logger.error("------- Exception while adding favorite session to CH")
        logger.error(err)


def remove_favorite_session_from_ch(project_id, user_id, session_id):
    add_favorite_session_to_ch(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1)
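Note that remove_favorite_session_from_ch is just an insert with sign=-1 rather than a DELETE: ClickHouse favors append-only writes, and a sign column lets paired rows cancel out at merge/query time (presumably a CollapsingMergeTree-style table; the engine itself isn't shown in this diff). A toy model of the cancellation:

# toy model of sign-based cancellation: a favorite is "present" only when
# its +1 rows outnumber its -1 rows after the table collapses
rows = [
    {"session_id": 42, "sign": 1},   # favorited
    {"session_id": 42, "sign": -1},  # un-favorited -> cancels the row above
    {"session_id": 7, "sign": 1},
]
net = {}
for r in rows:
    net[r["session_id"]] = net.get(r["session_id"], 0) + r["sign"]
favorites = [sid for sid, s in net.items() if s > 0]
print(favorites)  # [7]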
@ -1,24 +0,0 @@
import logging

from decouple import config

from chalicelib.utils import ch_client, exp_ch_helper

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))


def add_favorite_session(project_id, user_id, session_id, sign=1):
    try:
        with ch_client.ClickHouseClient() as cur:
            query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
                        VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
            params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
            cur.execute(query=query, params=params)

    except Exception as err:
        logging.error("------- Exception while adding favorite session to CH")
        logging.error(err)


def remove_favorite_session(project_id, user_id, session_id):
    add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1)
@ -4,8 +4,8 @@ from urllib.parse import urljoin
from decouple import config

import schemas
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
@ -1,157 +0,0 @@
import schemas
from chalicelib.core import events, metadata, events_mobile, \
    sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper


def __is_mobile_session(platform):
    return platform in ('ios', 'android')


def __group_metadata(session, project_metadata):
    meta = {}
    for m in project_metadata.keys():
        if project_metadata[m] is not None and session.get(m) is not None:
            meta[project_metadata[m]] = session[m]
        session.pop(m)
    return meta


def get_pre_replay(project_id, session_id):
    return {
        'domURL': [sessions_mobs.get_first_url(project_id=project_id, session_id=session_id, check_existence=False)]}


# This function should not use Clickhouse because it doesn't have `file_key`
def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
               group_metadata=False, live=True):
    with pg_client.PostgresClient() as cur:
        extra_query = []
        if include_fav_viewed:
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_favorite_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS favorite""")
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_viewed_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS viewed""")
        query = cur.mogrify(
            f"""\
            SELECT
                s.*,
                s.session_id::text AS session_id,
                encode(file_key,'hex') AS file_key,
                (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
                {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
                {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
            FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
            WHERE s.project_id = %(project_id)s
              AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
        )
        cur.execute(query=query)

        data = cur.fetchone()
    if data is not None:
        data = helper.dict_to_camel_case(data)
        if full_data:
            if __is_mobile_session(data["platform"]):
                data['mobsUrl'] = []
                data['videoURL'] = sessions_mobs.get_mobile_videos(session_id=session_id, project_id=project_id,
                                                                   check_existence=False)
            else:
                data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
            # for EE
            # context is required to check if the user has the right to access devtools
            data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                            context=context, check_existence=False)
            data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
            if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
                data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
                                                                         project_id=project_id,
                                                                         check_existence=False)
            else:
                data['utxVideo'] = []

            data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
                                                    check_existence=False)
            data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
            data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
                                                   project_key=data["projectKey"])
            data["inDB"] = True
        return data
    elif live:
        return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
    else:
        return None


def get_events(project_id, session_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT session_id, platform, start_ts, duration
                FROM public.sessions AS s
                WHERE s.project_id = %(project_id)s
                  AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id}
        )
        cur.execute(query=query)

        s_data = cur.fetchone()
    if s_data is not None:
        s_data = helper.dict_to_camel_case(s_data)
        data = {}
        if __is_mobile_session(s_data["platform"]):
            data['events'] = events_mobile.get_by_sessionId(project_id=project_id, session_id=session_id)
            for e in data['events']:
                if e["type"].endswith("_IOS"):
                    e["type"] = e["type"][:-len("_IOS")]
                elif e["type"].endswith("_MOBILE"):
                    e["type"] = e["type"][:-len("_MOBILE")]
            data['crashes'] = events_mobile.get_crashes_by_session_id(session_id=session_id)
            data['userEvents'] = events_mobile.get_customs_by_session_id(project_id=project_id,
                                                                         session_id=session_id)
            data['userTesting'] = []
        else:
            data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                      group_clickrage=True)
            all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
            data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
            # to keep only the first stack
            # limit the number of errors to reduce the response-body size
            data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                              if e['source'] == "js_exception"][:500]
            data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
                                                                  session_id=session_id)
            data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)

        data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
        data['issues'] = reduce_issues(data['issues'])
        return data
    else:
        return None


# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
        else:
            i += 1
            break

        if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
            issues_list.pop(j)
        else:
            i += 1

    return issues_list
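The comment above states the intent of reduce_issues: drop a same-type issue that occurs within 2 s of another. A minimal re-expression of that rule for reference (not the deleted implementation itself, which compares signed rather than absolute timestamp differences):

def dedup_issues(issues_list, window_ms=2000):
    # keep an issue only if no same-type issue was kept within the window
    last_kept = {}  # type -> timestamp of the last kept issue
    kept = []
    for issue in sorted(issues_list, key=lambda x: x["timestamp"]):
        prev = last_kept.get(issue["type"])
        if prev is not None and issue["timestamp"] - prev < window_ms:
            continue  # too close to the previous same-type issue
        last_kept[issue["type"]] = issue["timestamp"]
        kept.append(issue)
    return kept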
@ -1,13 +0,0 @@
from chalicelib.core import sessions_viewed_exp
from chalicelib.utils import pg_client


def view_session(project_id, user_id, session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""INSERT INTO public.user_viewed_sessions (user_id, session_id)
                           VALUES (%(userId)s,%(sessionId)s)
                           ON CONFLICT DO NOTHING;""",
                        {"userId": user_id, "sessionId": session_id})
        )
    sessions_viewed_exp.view_session(project_id=project_id, user_id=user_id, session_id=session_id)
@ -1,11 +1,15 @@
from chalicelib.utils import ch_client, exp_ch_helper
import logging
from decouple import config
from chalicelib.core.sessions.sessions_viewed import *

_view_session = view_session

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))


def view_session(project_id, user_id, session_id):
    _view_session(project_id=project_id, user_id=user_id, session_id=session_id)
    try:
        with ch_client.ClickHouseClient() as cur:
            query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_sessions_table()}(project_id, user_id, session_id)
@ -12,28 +12,38 @@ rm -rf ./chalicelib/core/authorizers.py
rm -rf ./chalicelib/core/autocomplete
rm -rf ./chalicelib/core/collaborations
rm -rf ./chalicelib/core/countries.py
rm -rf ./chalicelib/core/metrics.py
rm -rf ./chalicelib/core/custom_metrics.py
rm -rf ./chalicelib/core/custom_metrics_predefined.py
rm -rf ./chalicelib/core/dashboards.py
rm -rf ./chalicelib/core/errors_favorite.py
rm -rf ./chalicelib/core/metrics/metrics.py
rm -rf ./chalicelib/core/metrics/custom_metrics.py
rm -rf ./chalicelib/core/metrics/custom_metrics_predefined.py
rm -rf ./chalicelib/core/metrics/funnels.py
rm -rf ./chalicelib/core/metrics/dashboards.py
rm -rf ./chalicelib/core/metrics/heatmaps.py
rm -rf ./chalicelib/core/metrics/heatmaps_ch.py
rm -rf ./chalicelib/core/metrics/metrics_ch.py
rm -rf ./chalicelib/core/metrics/product_analytics.py
rm -rf ./chalicelib/core/metrics/product_analytics_ch.py
rm -rf ./chalicelib/core/metrics/product_anaytics2.py
rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_mobile.py
rm -rf ./chalicelib/core/feature_flags.py
rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/issue_tracking/*.py
rm -rf ./chalicelib/core/issue_tracking
rm -rf ./chalicelib/core/integrations_manager.py
rm -rf ./chalicelib/core/issues.py
rm -rf ./chalicelib/core/jobs.py
rm -rf ./chalicelib/core/log_tools/*.py
rm -rf ./chalicelib/core/log_tools
rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/saved_search.py
rm -rf ./chalicelib/core/sessions/sessions.py
rm -rf ./chalicelib/core/sessions/sessions_ch.py
rm -rf ./chalicelib/core/sessions/sessions_devtool.py
rm -rf ./chalicelib/core/sessions/sessions_favorite.py
rm -rf ./chalicelib/core/sessions/sessions_assignments.py
rm -rf ./chalicelib/core/sessions/sessions_metas.py
rm -rf ./chalicelib/core/sessions/sessions_mobs.py
rm -rf ./chalicelib/core/sessions/sessions_replay.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/significance.py
rm -rf ./chalicelib/core/socket_ios.py
@ -44,6 +54,7 @@ rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py
rm -rf ./chalicelib/core/boarding.py
rm -rf ./chalicelib/core/canvas.py
rm -rf ./chalicelib/utils/captcha.py
rm -rf ./chalicelib/utils/dev.py
@ -96,3 +107,8 @@ rm -rf ./chalicelib/core/alerts/alerts_processor.py
rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py
rm -rf ./chalicelib/core/alerts/alerts_listener.py
rm -rf ./chalicelib/core/alerts/modules/helpers.py
rm -rf ./chalicelib/core/errors/modules
rm -rf ./chalicelib/core/errors/errors.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_favorite.py
rm -rf ./chalicelib/core/errors/errors_viewed.py
@ -8,7 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import scope
from chalicelib.core import assist, heatmaps, errors, errors_viewed, errors_favorite, signup, feature_flags
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.errors import errors, errors_viewed, errors_favorite
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
    sessions_viewed, unprocessed_sessions
from chalicelib.core import tenants, users, projects, license
@ -1,7 +1,7 @@
from typing import Union

import schemas
from chalicelib.core import dashboards, custom_metrics
from chalicelib.core.metrics import dashboards, custom_metrics
from fastapi import Body, Depends
from or_dependencies import OR_context, OR_scope
from routers.base import get_routers
@ -14,6 +14,15 @@
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap" rel="stylesheet" />

<link rel="preload" href="/assets/prism/prism.css" as="style" onload="this.onload=null;this.rel='stylesheet'">
<script defer src="/assets/prism/prism.min.js"></script>
<script defer src="/assets/prism/prism-javascript.min.js"></script>
<script defer src="/assets/prism/prism-bash.min.js"></script>
<script defer src="/assets/prism/prism-jsx.min.js"></script>
<script defer src="/assets/prism/prism-typescript.min.js"></script>
<script defer src="/assets/prism/prism-kotlin.min.js"></script>
<script defer src="/assets/prism/prism-swift.min.js"></script>
</head>
<body>
<div id="modal-root"></div>
1 frontend/app/assets/prism/prism-bash.min.js vendored Normal file
File diff suppressed because one or more lines are too long

1 frontend/app/assets/prism/prism-javascript.min.js vendored Normal file
@ -0,0 +1 @@
Prism.languages.javascript=Prism.languages.extend("clike",{"class-name":[Prism.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$A-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\.(?:constructor|prototype))/,lookbehind:!0}],keyword:[{pattern:/((?:^|\})\s*)catch\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|assert(?=\s*\{)|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally(?=\s*(?:\{|$))|for|from(?=\s*(?:['"]|$))|function|(?:get|set)(?=\s*(?:[#\[$\w\xA0-\uFFFF]|$))|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],function:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,number:{pattern:RegExp("(^|[^\\w$])(?:NaN|Infinity|0[bB][01]+(?:_[01]+)*n?|0[oO][0-7]+(?:_[0-7]+)*n?|0[xX][\\dA-Fa-f]+(?:_[\\dA-Fa-f]+)*n?|\\d+(?:_\\d+)*n|(?:\\d+(?:_\\d+)*(?:\\.(?:\\d+(?:_\\d+)*)?)?|\\.\\d+(?:_\\d+)*)(?:[Ee][+-]?\\d+(?:_\\d+)*)?)(?![\\w$])"),lookbehind:!0},operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),Prism.languages.javascript["class-name"][0].pattern=/(\b(?:class|extends|implements|instanceof|interface|new)\s+)[\w.\\]+/,Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:RegExp("((?:^|[^$\\w\\xA0-\\uFFFF.\"'\\])\\s]|\\b(?:return|yield))\\s*)/(?:(?:\\[(?:[^\\]\\\\\r\n]|\\\\.)*\\]|\\\\.|[^/\\\\\\[\r\n])+/[dgimyus]{0,7}|(?:\\[(?:[^[\\]\\\\\r\n]|\\\\.|\\[(?:[^[\\]\\\\\r\n]|\\\\.|\\[(?:[^[\\]\\\\\r\n]|\\\\.)*\\])*\\])*\\]|\\\\.|[^/\\\\\\[\r\n])+/[dgimyus]{0,7}v[dgimyus]{0,7})(?=(?:\\s|/\\*(?:[^*]|\\*(?!/))*\\*/)*(?:$|[\r\n,.;:})\\]]|//))"),lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:Prism.languages.regex},"regex-delimiter":/^\/|\/$/,"regex-flags":/^[a-z]+$/}},"function-variable":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)?\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\))/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$a-z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*=>)/i,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*=>)/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*\{)/,lookbehind:!0,inside:Prism.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),Prism.languages.insertBefore("javascript","string",{hashbang:{pattern:/^#!.*/,greedy:!0,alias:"comment"},"template-string":{pattern:/`(?:\\[\s\S]|\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}|(?!\$\{)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\$\{|\}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}},"string-property":{pattern:/((?:^|[,{])[ \t]*)(["'])(?:\\(?:\r\n|[\s\S])|(?!\2)[^\\\r\n])*\2(?=\s*:)/m,lookbehind:!0,greedy:!0,alias:"property"}}),Prism.languages.insertBefore("javascript","operator",{"literal-property":{pattern:/((?:^|[,{])[ \t]*)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*:)/m,lookbehind:!0,alias:"property"}}),Prism.languages.markup&&(Prism.languages.markup.tag.addInlined("script","javascript"),Prism.languages.markup.tag.addAttribute("on(?:abort|blur|change|click|composition(?:end|start|update)|dblclick|error|focus(?:in|out)?|key(?:down|up)|load|mouse(?:down|enter|leave|move|out|over|up)|reset|resize|scroll|select|slotchange|submit|unload|wheel)","javascript")),Prism.languages.js=Prism.languages.javascript;

1 frontend/app/assets/prism/prism-jsx.min.js vendored Normal file
@@ -0,0 +1 @@
!function(t){var n=t.util.clone(t.languages.javascript),e="(?:\\{<S>*\\.{3}(?:[^{}]|<BRACES>)*\\})";function a(t,n){return t=t.replace(/<S>/g,(function(){return"(?:\\s|//.*(?!.)|/\\*(?:[^*]|\\*(?!/))\\*/)"})).replace(/<BRACES>/g,(function(){return"(?:\\{(?:\\{(?:\\{[^{}]*\\}|[^{}])*\\}|[^{}])*\\})"})).replace(/<SPREAD>/g,(function(){return e})),RegExp(t,n)}e=a(e).source,t.languages.jsx=t.languages.extend("markup",n),t.languages.jsx.tag.pattern=a("</?(?:[\\w.:-]+(?:<S>+(?:[\\w.:$-]+(?:=(?:\"(?:\\\\[^]|[^\\\\\"])*\"|'(?:\\\\[^]|[^\\\\'])*'|[^\\s{'\"/>=]+|<BRACES>))?|<SPREAD>))*<S>*/?)?>"),t.languages.jsx.tag.inside.tag.pattern=/^<\/?[^\s>\/]*/,t.languages.jsx.tag.inside["attr-value"].pattern=/=(?!\{)(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s'">]+)/,t.languages.jsx.tag.inside.tag.inside["class-name"]=/^[A-Z]\w*(?:\.[A-Z]\w*)*$/,t.languages.jsx.tag.inside.comment=n.comment,t.languages.insertBefore("inside","attr-name",{spread:{pattern:a("<SPREAD>"),inside:t.languages.jsx}},t.languages.jsx.tag),t.languages.insertBefore("inside","special-attr",{script:{pattern:a("=<BRACES>"),alias:"language-javascript",inside:{"script-punctuation":{pattern:/^=(?=\{)/,alias:"punctuation"},rest:t.languages.jsx}}},t.languages.jsx.tag);var s=function(t){return t?"string"==typeof t?t:"string"==typeof t.content?t.content:t.content.map(s).join(""):""},g=function(n){for(var e=[],a=0;a<n.length;a++){var o=n[a],i=!1;if("string"!=typeof o&&("tag"===o.type&&o.content[0]&&"tag"===o.content[0].type?"</"===o.content[0].content[0].content?e.length>0&&e[e.length-1].tagName===s(o.content[0].content[1])&&e.pop():"/>"===o.content[o.content.length-1].content||e.push({tagName:s(o.content[0].content[1]),openedBraces:0}):e.length>0&&"punctuation"===o.type&&"{"===o.content?e[e.length-1].openedBraces++:e.length>0&&e[e.length-1].openedBraces>0&&"punctuation"===o.type&&"}"===o.content?e[e.length-1].openedBraces--:i=!0),(i||"string"==typeof o)&&e.length>0&&0===e[e.length-1].openedBraces){var r=s(o);a<n.length-1&&("string"==typeof n[a+1]||"plain-text"===n[a+1].type)&&(r+=s(n[a+1]),n.splice(a+1,1)),a>0&&("string"==typeof n[a-1]||"plain-text"===n[a-1].type)&&(r=s(n[a-1])+r,n.splice(a-1,1),a--),n[a]=new t.Token("plain-text",r,null,r)}o.content&&"string"!=typeof o.content&&g(o.content)}};t.hooks.add("after-tokenize",(function(t){"jsx"!==t.language&&"tsx"!==t.language||g(t.tokens)}))}(Prism);

1 frontend/app/assets/prism/prism-kotlin.min.js vendored Normal file
@@ -0,0 +1 @@
!function(n){n.languages.kotlin=n.languages.extend("clike",{keyword:{pattern:/(^|[^.])\b(?:abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|to|try|typealias|val|var|vararg|when|where|while)\b/,lookbehind:!0},function:[{pattern:/(?:`[^\r\n`]+`|\b\w+)(?=\s*\()/,greedy:!0},{pattern:/(\.)(?:`[^\r\n`]+`|\w+)(?=\s*\{)/,lookbehind:!0,greedy:!0}],number:/\b(?:0[xX][\da-fA-F]+(?:_[\da-fA-F]+)*|0[bB][01]+(?:_[01]+)*|\d+(?:_\d+)*(?:\.\d+(?:_\d+)*)?(?:[eE][+-]?\d+(?:_\d+)*)?[fFL]?)\b/,operator:/\+[+=]?|-[-=>]?|==?=?|!(?:!|==?)?|[\/*%<>]=?|[?:]:?|\.\.|&&|\|\||\b(?:and|inv|or|shl|shr|ushr|xor)\b/}),delete n.languages.kotlin["class-name"];var e={"interpolation-punctuation":{pattern:/^\$\{?|\}$/,alias:"punctuation"},expression:{pattern:/[\s\S]+/,inside:n.languages.kotlin}};n.languages.insertBefore("kotlin","string",{"string-literal":[{pattern:/"""(?:[^$]|\$(?:(?!\{)|\{[^{}]*\}))*?"""/,alias:"multiline",inside:{interpolation:{pattern:/\$(?:[a-z_]\w*|\{[^{}]*\})/i,inside:e},string:/[\s\S]+/}},{pattern:/"(?:[^"\\\r\n$]|\\.|\$(?:(?!\{)|\{[^{}]*\}))*"/,alias:"singleline",inside:{interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$(?:[a-z_]\w*|\{[^{}]*\})/i,lookbehind:!0,inside:e},string:/[\s\S]+/}}],char:{pattern:/'(?:[^'\\\r\n]|\\(?:.|u[a-fA-F0-9]{0,4}))'/,greedy:!0}}),delete n.languages.kotlin.string,n.languages.insertBefore("kotlin","keyword",{annotation:{pattern:/\B@(?:\w+:)?(?:[A-Z]\w*|\[[^\]]+\])/,alias:"builtin"}}),n.languages.insertBefore("kotlin","function",{label:{pattern:/\b\w+@|@\w+\b/,alias:"symbol"}}),n.languages.kt=n.languages.kotlin,n.languages.kts=n.languages.kotlin}(Prism);

1 frontend/app/assets/prism/prism-swift.min.js vendored Normal file
@@ -0,0 +1 @@
Prism.languages.swift={comment:{pattern:/(^|[^\\:])(?:\/\/.*|\/\*(?:[^/*]|\/(?!\*)|\*(?!\/)|\/\*(?:[^*]|\*(?!\/))*\*\/)*\*\/)/,lookbehind:!0,greedy:!0},"string-literal":[{pattern:RegExp('(^|[^"#])(?:"(?:\\\\(?:\\((?:[^()]|\\([^()]*\\))*\\)|\r\n|[^(])|[^\\\\\r\n"])*"|"""(?:\\\\(?:\\((?:[^()]|\\([^()]*\\))*\\)|[^(])|[^\\\\"]|"(?!""))*""")(?!["#])'),lookbehind:!0,greedy:!0,inside:{interpolation:{pattern:/(\\\()(?:[^()]|\([^()]*\))*(?=\))/,lookbehind:!0,inside:null},"interpolation-punctuation":{pattern:/^\)|\\\($/,alias:"punctuation"},punctuation:/\\(?=[\r\n])/,string:/[\s\S]+/}},{pattern:RegExp('(^|[^"#])(#+)(?:"(?:\\\\(?:#+\\((?:[^()]|\\([^()]*\\))*\\)|\r\n|[^#])|[^\\\\\r\n])*?"|"""(?:\\\\(?:#+\\((?:[^()]|\\([^()]*\\))*\\)|[^#])|[^\\\\])*?""")\\2'),lookbehind:!0,greedy:!0,inside:{interpolation:{pattern:/(\\#+\()(?:[^()]|\([^()]*\))*(?=\))/,lookbehind:!0,inside:null},"interpolation-punctuation":{pattern:/^\)|\\#+\($/,alias:"punctuation"},string:/[\s\S]+/}}],directive:{pattern:RegExp("#(?:(?:elseif|if)\\b(?:[ \t]*(?:![ \t]*)?(?:\\b\\w+\\b(?:[ \t]*\\((?:[^()]|\\([^()]*\\))*\\))?|\\((?:[^()]|\\([^()]*\\))*\\))(?:[ \t]*(?:&&|\\|\\|))?)+|(?:else|endif)\\b)"),alias:"property",inside:{"directive-name":/^#\w+/,boolean:/\b(?:false|true)\b/,number:/\b\d+(?:\.\d+)*\b/,operator:/!|&&|\|\||[<>]=?/,punctuation:/[(),]/}},literal:{pattern:/#(?:colorLiteral|column|dsohandle|file(?:ID|Literal|Path)?|function|imageLiteral|line)\b/,alias:"constant"},"other-directive":{pattern:/#\w+\b/,alias:"property"},attribute:{pattern:/@\w+/,alias:"atrule"},"function-definition":{pattern:/(\bfunc\s+)\w+/,lookbehind:!0,alias:"function"},label:{pattern:/\b(break|continue)\s+\w+|\b[a-zA-Z_]\w*(?=\s*:\s*(?:for|repeat|while)\b)/,lookbehind:!0,alias:"important"},keyword:/\b(?:Any|Protocol|Self|Type|actor|as|assignment|associatedtype|associativity|async|await|break|case|catch|class|continue|convenience|default|defer|deinit|didSet|do|dynamic|else|enum|extension|fallthrough|fileprivate|final|for|func|get|guard|higherThan|if|import|in|indirect|infix|init|inout|internal|is|isolated|lazy|left|let|lowerThan|mutating|none|nonisolated|nonmutating|open|operator|optional|override|postfix|precedencegroup|prefix|private|protocol|public|repeat|required|rethrows|return|right|safe|self|set|some|static|struct|subscript|super|switch|throw|throws|try|typealias|unowned|unsafe|var|weak|where|while|willSet)\b/,boolean:/\b(?:false|true)\b/,nil:{pattern:/\bnil\b/,alias:"constant"},"short-argument":/\$\d+\b/,omit:{pattern:/\b_\b/,alias:"keyword"},number:/\b(?:[\d_]+(?:\.[\de_]+)?|0x[a-f0-9_]+(?:\.[a-f0-9p_]+)?|0b[01_]+|0o[0-7_]+)\b/i,"class-name":/\b[A-Z](?:[A-Z_\d]*[a-z]\w*)?\b/,function:/\b[a-z_]\w*(?=\s*\()/i,constant:/\b(?:[A-Z_]{2,}|k[A-Z][A-Za-z_]+)\b/,operator:/[-+*/%=!<>&|^~?]+|\.[.\-+*/%=!<>&|^~?]+/,punctuation:/[{}[\]();,.:\\]/},Prism.languages.swift["string-literal"].forEach((function(e){e.inside.interpolation.inside=Prism.languages.swift}));

1 frontend/app/assets/prism/prism-typescript.min.js vendored Normal file
@@ -0,0 +1 @@
!function(e){e.languages.typescript=e.languages.extend("javascript",{"class-name":{pattern:/(\b(?:class|extends|implements|instanceof|interface|new|type)\s+)(?!keyof\b)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?:\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>)?/,lookbehind:!0,greedy:!0,inside:null},builtin:/\b(?:Array|Function|Promise|any|boolean|console|never|number|string|symbol|unknown)\b/}),e.languages.typescript.keyword.push(/\b(?:abstract|declare|is|keyof|readonly|require)\b/,/\b(?:asserts|infer|interface|module|namespace|type)\b(?=\s*(?:[{_$a-zA-Z\xA0-\uFFFF]|$))/,/\btype\b(?=\s*(?:[\{*]|$))/),delete e.languages.typescript.parameter,delete e.languages.typescript["literal-property"];var s=e.languages.extend("typescript",{});delete s["class-name"],e.languages.typescript["class-name"].inside=s,e.languages.insertBefore("typescript","function",{decorator:{pattern:/@[$\w\xA0-\uFFFF]+/,inside:{at:{pattern:/^@/,alias:"operator"},function:/^[\s\S]+/}},"generic-function":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>(?=\s*\()/,greedy:!0,inside:{function:/^#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:s}}}}),e.languages.ts=e.languages.typescript}(Prism);

140 frontend/app/assets/prism/prism.css Normal file
@@ -0,0 +1,140 @@
/**
 * prism.js default theme for JavaScript, CSS and HTML
 * Based on dabblet (http://dabblet.com)
 * @author Lea Verou
 */

code[class*="language-"],
pre[class*="language-"] {
    color: black;
    background: none;
    text-shadow: 0 1px white;
    font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
    font-size: 1em;
    text-align: left;
    white-space: pre;
    word-spacing: normal;
    word-break: normal;
    word-wrap: normal;
    line-height: 1.5;

    -moz-tab-size: 4;
    -o-tab-size: 4;
    tab-size: 4;

    -webkit-hyphens: none;
    -moz-hyphens: none;
    -ms-hyphens: none;
    hyphens: none;
}

pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection,
code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection {
    text-shadow: none;
    background: #b3d4fc;
}

pre[class*="language-"]::selection, pre[class*="language-"] ::selection,
code[class*="language-"]::selection, code[class*="language-"] ::selection {
    text-shadow: none;
    background: #b3d4fc;
}

@media print {
    code[class*="language-"],
    pre[class*="language-"] {
        text-shadow: none;
    }
}

/* Code blocks */
pre[class*="language-"] {
    padding: 1em;
    margin: .5em 0;
    overflow: auto;
}

:not(pre) > code[class*="language-"],
pre[class*="language-"] {
    background: #f5f2f0;
}

/* Inline code */
:not(pre) > code[class*="language-"] {
    padding: .1em;
    border-radius: .3em;
    white-space: normal;
}

.token.comment,
.token.prolog,
.token.doctype,
.token.cdata {
    color: slategray;
}

.token.punctuation {
    color: #999;
}

.token.namespace {
    opacity: .7;
}

.token.property,
.token.tag,
.token.boolean,
.token.number,
.token.constant,
.token.symbol,
.token.deleted {
    color: #905;
}

.token.selector,
.token.attr-name,
.token.string,
.token.char,
.token.builtin,
.token.inserted {
    color: #690;
}

.token.operator,
.token.entity,
.token.url,
.language-css .token.string,
.style .token.string {
    color: #9a6e3a;
    /* This background color was intended by the author of this theme. */
    background: hsla(0, 0%, 100%, .5);
}

.token.atrule,
.token.attr-value,
.token.keyword {
    color: #07a;
}

.token.function,
.token.class-name {
    color: #DD4A68;
}

.token.regex,
.token.important,
.token.variable {
    color: #e90;
}

.token.important,
.token.bold {
    font-weight: bold;
}
.token.italic {
    font-style: italic;
}

.token.entity {
    cursor: help;
}

16 frontend/app/assets/prism/prism.min.js vendored Normal file
File diff suppressed because one or more lines are too long

@@ -1,20 +1,19 @@
-import React, { useEffect } from "react";
 import Prism from "prismjs";
+import React, { useEffect } from 'react';
 
 interface IProps {
   code: string;
   language: string;
 }
 
-const CodeBlock = ({ code, language }: IProps) => {
+export default function CodeBlock({ code, language = 'javascript' }) {
   useEffect(() => {
-    Prism.highlightAll(false);
-  }, []);
+    setTimeout(() => {
+      if (window.Prism) {
+        Prism.highlightAll();
+      }
+    }, 0)
+  }, [code, language]);
 
   return (
     <pre>
-      <code children={code} className={`language-${language}`} />
+      <code className={`language-${language}`}>
+        {code}
+      </code>
     </pre>
   );
-};
-
-export default CodeBlock;
+}

@@ -1,5 +1,5 @@
 import './styles/index.css';
-import './styles/global.scss'
+import './styles/global.css'
 import React from 'react';
 import { createRoot } from 'react-dom/client';
 import './init';

@@ -7,21 +7,5 @@ module.exports = {
   plugins: [
     'babel-plugin-react-require',
     ['@babel/plugin-proposal-decorators', { legacy: true }],
-    [
-      'prismjs',
-      {
-        languages: [
-          'javascript',
-          'css',
-          'bash',
-          'typescript',
-          'jsx',
-          'kotlin',
-          'swift',
-        ],
-        theme: 'default',
-        css: true,
-      },
-    ],
   ],
 };

@@ -1,6 +1,11 @@
 const colors = require('./app/theme/colors');
 const defaultColors = require('tailwindcss/colors');
 
+const deprecatedDefaults = ['lightBlue', 'warmGray', 'trueGray', 'coolGray', 'blueGray']
+deprecatedDefaults.forEach(color => {
+  delete defaultColors[color]
+})
+
 module.exports = {
   content: [
     './app/**/*.tsx',

@@ -122,6 +122,7 @@ const config: Configuration = {
     },
   },
   plugins: [
+    new webpack.ProgressPlugin(),
     (isDevelopment ? false : new CompressionPlugin({
       test: /\.(js|css|html|svg)$/,
       algorithm: 'brotliCompress',
@@ -141,7 +142,7 @@ const config: Configuration = {
       ],
     }),
     new MiniCssExtractPlugin({ ignoreOrder: true }),
-  ],
+  ],
   devtool: isDevelopment ? "inline-source-map" : false,
   performance: {
     hints: false,