* refactor(chalice): refactored errors
* refactor(chalice): refactored metrics/cards/dashboards
* refactor(chalice): refactored sessions
* refactor(chalice): refactored sourcemaps
Kraiem Taha Yassine 2024-12-12 12:37:39 +01:00 committed by GitHub
parent e03bce3ba5
commit a654e30df2
65 changed files with 386 additions and 1727 deletions

View file

@@ -1,8 +1,8 @@
from chalicelib.utils import pg_client
from chalicelib.core import projects
from chalicelib.core.log_tools import datadog, stackdriver, sentry
from chalicelib.core import users
from chalicelib.core.log_tools import datadog, stackdriver, sentry
from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client
def get_state(tenant_id):
@@ -21,21 +21,23 @@ def get_state(tenant_id):
recorded = cur.fetchone()["exists"]
meta = False
if recorded:
cur.execute("""SELECT EXISTS((SELECT 1
query = cur.mogrify("""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE sessions.project_id = p.project_id
AND sessions.user_id IS NOT NULL
LIMIT 1) AS sessions(user_id) ON (TRUE)
WHERE p.deleted_at ISNULL
WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL
AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
OR p.metadata_10 IS NOT NULL )
)) AS exists;""")
)) AS exists;""",
{"tenant_id": tenant_id})
cur.execute(query)
meta = cur.fetchone()["exists"]
@@ -78,21 +80,23 @@ def get_state_installing(tenant_id):
def get_state_identify_users(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute("""SELECT EXISTS((SELECT 1
query = cur.mogrify(f"""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE sessions.project_id = p.project_id
AND sessions.user_id IS NOT NULL
LIMIT 1) AS sessions(user_id) ON (TRUE)
WHERE p.deleted_at ISNULL
WHERE {TENANT_CONDITION} AND p.deleted_at ISNULL
AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
OR p.metadata_10 IS NOT NULL )
)) AS exists;""")
)) AS exists;""",
{"tenant_id": tenant_id})
cur.execute(query)
meta = cur.fetchone()["exists"]
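
Both hunks in this file converge on the same pattern: the tenant scoping clause becomes a static SQL fragment interpolated into the query text, while the tenant id itself stays a bound parameter. A minimal sketch of that pattern, assuming (per the deleted modules file further down) that TENANT_CONDITION is "TRUE" in FOSS and something like "p.tenant_id = %(tenant_id)s" in EE; the helper name is hypothetical:

# Sketch only: TENANT_CONDITION is a trusted constant, so f-string interpolation
# is safe here; user-supplied values still go through mogrify's parameter dict.
from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client

def tenant_scoped_exists(tenant_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT EXISTS((SELECT 1
                                               FROM public.projects AS p
                                               WHERE {TENANT_CONDITION}
                                                 AND p.deleted_at ISNULL)) AS exists;""",
                            {"tenant_id": tenant_id})
        cur.execute(query)
        return cur.fetchone()["exists"]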

View file

@@ -0,0 +1,12 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental error search")
from . import errors as errors_legacy
from . import errors_ch as errors
else:
from . import errors
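
This new chalicelib/core/errors/__init__.py resolves the EXP_ERRORS_SEARCH flag once at import time, so call sites never change regardless of which backend is active. A hypothetical caller, using the search signature visible in the deleted errors.py below:

# Hypothetical caller sketch: the package import yields errors_ch when the flag
# is on, otherwise the legacy PostgreSQL-backed errors module.
from chalicelib.core import errors

def list_project_errors(data, project_id, user_id):
    return errors.search(data, project_id=project_id, user_id=user_id)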

View file

@@ -1,7 +1,8 @@
import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.core import sourcemaps
from chalicelib.core.errors.modules import sessions
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC

View file

@@ -1,7 +1,7 @@
from decouple import config
import schemas
from chalicelib.core import errors_legacy
from . import errors as errors_legacy
from chalicelib.core import metrics, metadata
from chalicelib.core import sessions
from chalicelib.utils import ch_client, exp_ch_helper
@@ -151,9 +151,6 @@ def __process_tags_map(row):
def get_details(project_id, error_id, user_id, **data):
if not config("EXP_ERRORS_GET", cast=bool, default=False):
return errors_legacy.get_details(project_id, error_id, user_id, **data)
MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0)
MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0)
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
@@ -167,7 +164,6 @@ def get_details(project_id, error_id, user_id, **data):
ch_basic_query = __get_basic_constraints(time_constraint=False)
ch_basic_query.append("error_id = %(error_id)s")
with ch_client.ClickHouseClient() as ch:
data["startDate24"] = TimeUTC.now(-1)
data["endDate24"] = TimeUTC.now()

View file

@@ -0,0 +1,10 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
from chalicelib.core.sessions import sessions_ch as sessions
else:
from chalicelib.core.sessions import sessions

View file

@@ -1,5 +1,5 @@
import schemas
from chalicelib.core.issue_tracking.modules import TENANT_CONDITION
from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client

View file

@@ -1,6 +1,6 @@
from chalicelib.utils import pg_client, helper
import json
from chalicelib.core.log_tools.modules import TENANT_CONDITION
from chalicelib.core.modules import TENANT_CONDITION
EXCEPT = ["jira_server", "jira_cloud"]

View file

@@ -1 +0,0 @@
TENANT_CONDITION = "TRUE"

View file

@@ -0,0 +1,13 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
if config("EXP_METRICS", cast=bool, default=False):
logger.info(">>> Using experimental metrics")
from chalicelib.core.metrics import heatmaps_ch as heatmaps
from chalicelib.core.metrics import metrics_ch as metrics
else:
from chalicelib.core.metrics import heatmaps
from chalicelib.core.metrics import metrics

View file

@@ -4,7 +4,8 @@ import logging
from fastapi import HTTPException, status
import schemas
from chalicelib.core import funnels, errors, issues, heatmaps, product_analytics, custom_metrics_predefined
from chalicelib.core import errors, issues
from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined
from chalicelib.core.sessions import sessions
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC

View file

@@ -2,7 +2,7 @@ import logging
from typing import Union
import schemas
from chalicelib.core import metrics
from chalicelib.core.metrics import metrics
logger = logging.getLogger(__name__)

View file

@@ -1,7 +1,7 @@
import json
import schemas
from chalicelib.core import custom_metrics
from chalicelib.core.metrics import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC

View file

@@ -1,7 +1,7 @@
from typing import List
import schemas
from chalicelib.core import significance
from chalicelib.core.metrics.modules import significance
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh

View file

@@ -3,14 +3,10 @@ import logging
from decouple import config
import schemas
from chalicelib.core import sessions_mobs, events
from chalicelib.core import events
from chalicelib.core.metrics.modules import sessions, sessions_mobs
from chalicelib.utils import sql_helper as sh
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_ch as sessions
else:
from chalicelib.core import sessions
from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper
logger = logging.getLogger(__name__)

View file

@@ -0,0 +1,12 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
if config("EXP_METRICS", cast=bool, default=False):
from chalicelib.core.sessions import sessions_ch as sessions
else:
from chalicelib.core.sessions import sessions
from chalicelib.core.sessions import sessions_mobs

View file

@@ -0,0 +1,10 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
from .significance import *
if config("EXP_METRICS", cast=bool, default=False):
from .significance_ch import *
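
Note the ordering in this new modules/__init__: the legacy star-import always runs, and when EXP_METRICS is on, the second star-import shadows any name defined in both modules with its ClickHouse variant. A self-contained illustration of that shadowing rule (stand-in functions, not the real symbols):

# Later bindings win: this mimics the effect of the two star-imports above.
def significance_score():           # stands in for `from .significance import *`
    return "postgres"

EXP_METRICS = True
if EXP_METRICS:
    def significance_score():       # stands in for `from .significance_ch import *`
        return "clickhouse"

assert significance_score() == "clickhouse"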

View file

@@ -1,20 +1,15 @@
import logging
import schemas
from chalicelib.core import events, metadata
from chalicelib.utils import sql_helper as sh
"""
todo: remove LIMIT from the query
"""
from typing import List
import math
import warnings
from collections import defaultdict
from typing import List
from psycopg2.extras import RealDictRow
import schemas
from chalicelib.core import events, metadata
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
logger = logging.getLogger(__name__)
SIGNIFICANCE_THRSH = 0.4

View file

@@ -1,6 +1,12 @@
import logging
from typing import List
from psycopg2.extras import RealDictRow
import schemas
from chalicelib.utils import ch_client
from chalicelib.utils import exp_ch_helper
from .significance import *
from chalicelib.utils import helper
logger = logging.getLogger(__name__)

View file

@@ -1,5 +1,6 @@
from decouple import config
import schemas
from chalicelib.utils.storage import StorageClient
@@ -13,7 +14,7 @@ def __get_devtools_keys(project_id, session_id):
]
def get_urls(session_id, project_id, check_existence: bool = True):
def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
results = []
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k):

View file

@@ -4,6 +4,7 @@ from chalicelib.core import events, metadata, events_mobile, \
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.core.modules import MOB_KEY
def __is_mobile_session(platform):
@@ -42,6 +43,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
SELECT
s.*,
s.session_id::text AS session_id,
{MOB_KEY}
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
@@ -63,7 +65,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
else:
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
check_existence=False)
context=context, check_existence=False)
data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,

View file

@@ -3,7 +3,7 @@ from urllib.parse import urlparse
import requests
from decouple import config
from chalicelib.core import sourcemaps_parser
from chalicelib.core.sourcemaps import sourcemaps_parser
from chalicelib.utils.storage import StorageClient, generators

View file

@@ -72,4 +72,6 @@ STAGE=default-foss
TZ=UTC
EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
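
These .env defaults are what flip the import-time switches added in this commit: python-decouple reads each key from the environment first, then from this file, and cast=bool parses strings such as "true" or "1" into booleans. A minimal sketch of the consuming side:

# Minimal sketch: config() falls back to `default` when the key is absent,
# so the experimental paths stay off unless explicitly enabled.
from decouple import config

EXP_ERRORS_SEARCH = config("EXP_ERRORS_SEARCH", cast=bool, default=False)
EXP_METRICS = config("EXP_METRICS", cast=bool, default=False)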

View file

@@ -8,7 +8,8 @@ from chalicelib.core import sourcemaps, events, projects, alerts, issues, \
metadata, reset_password, \
log_tools, sessions, announcements, \
weekly_report, assist, mobile, tenants, boarding, \
notifications, webhook, users, custom_metrics, saved_search, tags, autocomplete
notifications, webhook, users, saved_search, tags, autocomplete
from chalicelib.core.metrics import custom_metrics
from chalicelib.core.issue_tracking import github, integrations_global, integrations_manager, \
jira_cloud
from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \

View file

@@ -8,8 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import scope
from chalicelib.core import errors, errors_viewed, errors_favorite, heatmaps, \
assist, signup, feature_flags
from chalicelib.core import errors, assist, signup, feature_flags
from chalicelib.core.metrics import heatmaps
from chalicelib.core.errors import errors_favorite, errors_viewed
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
sessions_assignments, unprocessed_sessions
from chalicelib.core import tenants, users, projects, license

View file

@@ -1,7 +1,3 @@
from fastapi import Body
import schemas
from chalicelib.core import product_analytics
from routers.base import get_routers
public_app, app, app_apikey = get_routers()

View file

@@ -1,7 +1,7 @@
from typing import Union
import schemas
from chalicelib.core import dashboards, custom_metrics
from chalicelib.core.metrics import custom_metrics, dashboards
from fastapi import Body, Depends
from or_dependencies import OR_context
from routers.base import get_routers

View file

@@ -1,8 +1,6 @@
from typing import Union
import schemas
from chalicelib.core import product_anaytics2
from fastapi import Body, Depends
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers

ee/api/.gitignore (vendored, 29 changes)
View file

@@ -188,30 +188,38 @@ Pipfile.lock
/chalicelib/core/assist.py
/chalicelib/core/authorizers.py
/chalicelib/core/autocomplete/*
/chalicelib/core/boarding.py
/chalicelib/core/canvas.py
/chalicelib/core/collaborations/*
/chalicelib/core/countries.py
/chalicelib/core/metrics.py
/chalicelib/core/custom_metrics.py
/chalicelib/core/custom_metrics_predefined.py
/chalicelib/core/dashboards.py
/chalicelib/core/errors_favorite.py
/chalicelib/core/metrics/metrics.py
/chalicelib/core/metrics/custom_metrics.py
/chalicelib/core/metrics/custom_metrics_predefined.py
/chalicelib/core/metrics/dashboards.py
/chalicelib/core/metrics/funnels.py
/chalicelib/core/metrics/heatmaps.py
/chalicelib/core/metrics/heatmaps_ch.py
/chalicelib/core/metrics/metrics_ch.py
/chalicelib/core/events.py
/chalicelib/core/events_mobile.py
/chalicelib/core/feature_flags.py
/chalicelib/core/funnels.py
/chalicelib/core/issue_tracking/*.py
/chalicelib/core/issue_tracking/*
/chalicelib/core/issues.py
/chalicelib/core/jobs.py
/chalicelib/core/log_tools/*.py
/chalicelib/core/log_tools/*
/chalicelib/core/metadata.py
/chalicelib/core/mobile.py
/chalicelib/core/saved_search.py
/chalicelib/core/sessions/sessions.py
/chalicelib/core/sessions/sessions_ch.py
/chalicelib/core/sessions/sessions_devtool.py
/chalicelib/core/sessions/sessions_favorite.py
/chalicelib/core/sessions/sessions_assignments.py
/chalicelib/core/sessions/sessions_metas.py
/chalicelib/core/sessions/sessions_mobs.py
/chalicelib/core/sessions/sessions_replay.py
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/significance.py
/chalicelib/core/socket_ios.py
@@ -276,3 +284,8 @@ Pipfile.lock
/chalicelib/core/alerts/alerts_processor_ch.py
/chalicelib/core/alerts/alerts_listener.py
/chalicelib/core/alerts/modules/helpers.py
/chalicelib/core/errors/modules/*
/chalicelib/core/errors/errors.py
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_favorite.py
/chalicelib/core/errors/errors_viewed.py

View file

@@ -3,31 +3,9 @@ import logging
from decouple import config
logger = logging.getLogger(__name__)
from . import custom_metrics as custom_metrics_legacy
from . import custom_metrics_ee as custom_metrics
from . import metrics_ch as metrics
from . import metrics as metrics_legacy
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
logger.info(">>> Using experimental autocomplete")
else:
from . import autocomplete as autocomplete
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental error search")
from . import errors as errors_legacy
from . import errors_exp as errors
if config("EXP_ERRORS_GET", cast=bool, default=False):
logger.info(">>> Using experimental error get")
else:
from . import errors as errors
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
logger.info(">>> Using experimental sessions search for metrics")
if config("EXP_FUNNELS", cast=bool, default=False):
logger.info(">>> Using experimental funnels")
from . import significance_exp as significance
else:
from . import significance as significance

View file

@@ -1,119 +0,0 @@
from chalicelib.utils import pg_client
from chalicelib.core import log_tool_datadog, log_tool_stackdriver, log_tool_sentry
from chalicelib.core import users
from chalicelib.core import projects
def get_state(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
recorded = False
meta = False
if len(pids) > 0:
cur.execute(
cur.mogrify("""SELECT EXISTS(( SELECT 1
FROM public.sessions AS s
WHERE s.project_id IN %(ids)s)) AS exists;""",
{"ids": tuple(pids)})
)
recorded = cur.fetchone()["exists"]
meta = False
if recorded:
cur.execute(
cur.mogrify("""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE sessions.project_id = p.project_id
AND sessions.user_id IS NOT NULL
LIMIT 1) AS sessions(user_id) ON (TRUE)
WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL
AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
OR p.metadata_10 IS NOT NULL )
)) AS exists;"""
, {"tenant_id": tenant_id}))
meta = cur.fetchone()["exists"]
return [
{"task": "Install OpenReplay",
"done": recorded,
"URL": "https://docs.openreplay.com/getting-started/quick-start"},
{"task": "Identify Users",
"done": meta,
"URL": "https://docs.openreplay.com/data-privacy-security/metadata"},
{"task": "Invite Team Members",
"done": len(users.get_members(tenant_id=tenant_id)) > 1,
"URL": "https://app.openreplay.com/client/manage-users"},
{"task": "Integrations",
"done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
"URL": "https://docs.openreplay.com/integrations"}
]
def get_state_installing(tenant_id):
pids = projects.get_projects_ids(tenant_id=tenant_id)
with pg_client.PostgresClient() as cur:
recorded = False
if len(pids) > 0:
cur.execute(
cur.mogrify("""SELECT EXISTS(( SELECT 1
FROM public.sessions AS s
WHERE s.project_id IN %(ids)s)) AS exists;""",
{"ids": tuple(pids)})
)
recorded = cur.fetchone()["exists"]
return {"task": "Install OpenReplay",
"done": recorded,
"URL": "https://docs.openreplay.com/getting-started/quick-start"}
def get_state_identify_users(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""SELECT EXISTS((SELECT 1
FROM public.projects AS p
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
WHERE sessions.project_id = p.project_id
AND sessions.user_id IS NOT NULL
LIMIT 1) AS sessions(user_id) ON (TRUE)
WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL
AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL
OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL
OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL
OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL
OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL
OR p.metadata_10 IS NOT NULL )
)) AS exists;"""
, {"tenant_id": tenant_id}))
meta = cur.fetchone()["exists"]
return {"task": "Identify Users",
"done": meta,
"URL": "https://docs.openreplay.com/data-privacy-security/metadata"}
def get_state_manage_users(tenant_id):
return {"task": "Invite Team Members",
"done": len(users.get_members(tenant_id=tenant_id)) > 1,
"URL": "https://app.openreplay.com/client/manage-users"}
def get_state_integrations(tenant_id):
return {"task": "Integrations",
"done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
"URL": "https://docs.openreplay.com/integrations"}

View file

@@ -1,236 +0,0 @@
import json
import logging
from decouple import config
from fastapi import HTTPException, status
from .custom_metrics import *
import schemas
from chalicelib.core import funnels, issues, heatmaps, sessions_mobs, sessions_favorite, \
product_analytics, custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import extra
# TODO: fix this import
from . import errors as errors
# if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
# logging.info(">>> Using experimental error search")
# from . import errors_exp as errors
# else:
# from . import errors as errors
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
from chalicelib.core import sessions
else:
from chalicelib.core import sessions_legacy as sessions
logger = logging.getLogger(__name__)
# TODO: refactor this to split
# timeseries /
# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
# remove "table of" calls from this function
def __try_live(project_id, data: schemas.CardSchema):
results = []
for i, s in enumerate(data.series):
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value))
return results
def __get_table_of_series(project_id, data: schemas.CardSchema):
results = []
for i, s in enumerate(data.series):
results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density,
metric_of=data.metric_of, metric_value=data.metric_value,
metric_format=data.metric_format))
return results
def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
if len(data.series) == 0:
return {
"total": 0,
"errors": []
}
return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id)
def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
if len(data.series) == 0:
logger.debug("empty series")
return {
"total": 0,
"sessions": []
}
return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id)
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if card is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**card)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
results = []
for s in data.series:
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if len(data.series) == 0:
return results
for s in data.series:
if len(data.filters) > 0:
s.filter.filters += data.filters
s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))
results.append({"seriesId": None, "seriesName": s.name,
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
session_data = {"sessionId": data.session_id}
else:
session_data = __get_heat_map_chart(project=project, user_id=user_id,
data=data, include_mobs=False)
if session_data is not None:
session_data = {"sessionId": session_data["sessionId"]}
if session_data is not None:
# for EE only
keys = sessions_mobs. \
__get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"])
keys += sessions_mobs. \
__get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions
tag = config('RETENTION_L_VALUE', default='vault')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
logger.error(str(e))
_data = {"session_data": json.dumps(session_data) if session_data is not None else None}
for i, s in enumerate(data.series):
for k in s.model_dump().keys():
_data[f"{k}_{i}"] = s.__getattribute__(k)
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
"default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail, data,
card_info)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
%(card_info)s)
RETURNING metric_id"""
if len(data.series) > 0:
query = f"""WITH m AS ({query})
INSERT INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;"""
query = cur.mogrify(query, params)
cur.execute(query)
r = cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)}
def delete_card(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND metric_id = %(metric_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING data;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
)
# for EE only
row = cur.fetchone()
if row:
if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]):
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"])
keys += sessions_mobs. \
__get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions
tag = config('RETENTION_D_VALUE', default='default')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
logger.error(str(e))
return {"state": "success"}
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if card is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**card)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:
s.filter.startTimestamp = data.startTimestamp
s.filter.endTimestamp = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
issue = None
for i in issues_list:
if i.get("issueId", "") == issue_id:
issue = i
break
if issue is None:
issue = issues.get(project_id=project_id, issue_id=issue_id)
if issue is not None:
issue = {**issue,
"affectedSessions": 0,
"affectedUsers": 0,
"conversionImpact": 0,
"lostConversions": 0,
"unaffectedSessions": 0}
return {"seriesId": s.series_id, "seriesName": s.name,
"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}

View file

@@ -1,609 +0,0 @@
import json
from decouple import config
import schemas
from chalicelib.core import sourcemaps
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
else:
from chalicelib.core import sessions
def get(error_id, family=False):
if family:
return get_batch([error_id])
with pg_client.PostgresClient() as cur:
# trying: return only 1 error, without event details
query = cur.mogrify(
# "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
"SELECT * FROM public.errors WHERE error_id = %(error_id)s LIMIT 1;",
{"error_id": error_id})
cur.execute(query=query)
result = cur.fetchone()
if result is not None:
result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
return helper.dict_to_camel_case(result)
def get_batch(error_ids):
if len(error_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""
WITH RECURSIVE error_family AS (
SELECT *
FROM public.errors
WHERE error_id IN %(error_ids)s
UNION
SELECT child_errors.*
FROM public.errors AS child_errors
INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id
)
SELECT *
FROM error_family;""",
{"error_ids": tuple(error_ids)})
cur.execute(query=query)
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
def __flatten_sort_key_count_version(data, merge_nested=False):
if data is None:
return []
return sorted(
[
{
"name": f'{o["name"]}@{v["version"]}',
"count": v["count"]
} for o in data for v in o["partition"]
],
key=lambda o: o["count"], reverse=True) if merge_nested else \
[
{
"name": o["name"],
"count": o["count"],
} for o in data
]
def __process_tags(row):
return [
{"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
{"name": "browser.ver",
"partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
{"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
{"name": "OS.ver",
"partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
{"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
{"name": "device",
"partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
{"name": "country", "partitions": row.pop("country_partition")}
]
def get_details(project_id, error_id, user_id, **data):
pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
pg_sub_query24.append("error_id = %(error_id)s")
pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
startTime_arg_name="startDate30",
endTime_arg_name="endDate30", project_key="sessions.project_id")
pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
pg_sub_query30_session.append("error_id = %(error_id)s")
pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
endTime_arg_name="endDate30", project_key="errors.project_id")
pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")
pg_sub_query30_err.append("error_id = %(error_id)s")
pg_sub_query30_err.append("source ='js_exception'")
pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
pg_sub_query30.append("error_id = %(error_id)s")
pg_basic_query = __get_basic_constraints(time_constraint=False)
pg_basic_query.append("error_id = %(error_id)s")
with pg_client.PostgresClient() as cur:
data["startDate24"] = TimeUTC.now(-1)
data["endDate24"] = TimeUTC.now()
data["startDate30"] = TimeUTC.now(-30)
data["endDate30"] = TimeUTC.now()
density24 = int(data.get("density24", 24))
step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
density30 = int(data.get("density30", 30))
step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
params = {
"startDate24": data['startDate24'],
"endDate24": data['endDate24'],
"startDate30": data['startDate30'],
"endDate30": data['endDate30'],
"project_id": project_id,
"userId": user_id,
"step_size24": step_size24,
"step_size30": step_size30,
"error_id": error_id}
main_pg_query = f"""\
SELECT error_id,
name,
message,
users,
sessions,
last_occurrence,
first_occurrence,
last_session_id,
browsers_partition,
os_partition,
device_partition,
country_partition,
chart24,
chart30,
custom_tags
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_id) AS users,
COUNT(DISTINCT session_id) AS sessions
FROM public.errors
INNER JOIN events.errors AS s_errors USING (error_id)
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_err)}
GROUP BY error_id, name, message) AS details
INNER JOIN (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
WHERE error_id = %(error_id)s) AS time_details ON (TRUE)
INNER JOIN (SELECT session_id AS last_session_id,
coalesce(custom_tags, '[]')::jsonb AS custom_tags
FROM events.errors
LEFT JOIN LATERAL (
SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags
FROM errors_tags
WHERE errors_tags.error_id = %(error_id)s
AND errors_tags.session_id = errors.session_id
AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE)
WHERE error_id = %(error_id)s
ORDER BY errors.timestamp DESC
LIMIT 1) AS last_session_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
FROM (SELECT *
FROM (SELECT user_browser AS name,
COUNT(session_id) AS count
FROM events.errors
INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
GROUP BY user_browser
ORDER BY count DESC) AS count_per_browser_query
INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
FROM (SELECT user_browser_version AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
AND sessions.user_browser = count_per_browser_query.name
GROUP BY user_browser_version
ORDER BY count DESC) AS version_details
) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
FROM (SELECT *
FROM (SELECT user_os AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
GROUP BY user_os
ORDER BY count DESC) AS count_per_os_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
AND sessions.user_os = count_per_os_details.name
GROUP BY user_os_version
ORDER BY count DESC) AS count_per_version_details
GROUP BY count_per_os_details.name ) AS os_version_details
ON (TRUE)) AS os_details) AS os_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
FROM (SELECT *
FROM (SELECT user_device_type AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
GROUP BY user_device_type
ORDER BY count DESC) AS count_per_device_details
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
FROM (SELECT CASE
WHEN user_device = '' OR user_device ISNULL
THEN 'unknown'
ELSE user_device END AS version,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
AND sessions.user_device_type = count_per_device_details.name
GROUP BY user_device
ORDER BY count DESC) AS count_per_device_v_details
GROUP BY count_per_device_details.name ) AS device_version_details
ON (TRUE)) AS device_details) AS device_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
FROM (SELECT user_country AS name,
COUNT(session_id) AS count
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30_session)}
GROUP BY user_country
ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query24)}
) AS chart_details ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
ON (TRUE)
GROUP BY timestamp
ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
"""
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
row = cur.fetchone()
if row is None:
return {"errors": ["error not found"]}
row["tags"] = __process_tags(row)
query = cur.mogrify(
f"""SELECT error_id, status, session_id, start_ts,
parent_error_id,session_id, user_anonymous_id,
user_id, user_uuid, user_browser, user_browser_version,
user_os, user_os_version, user_device, payload,
FALSE AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
INNER JOIN public.sessions USING (session_id)
WHERE pe.project_id = %(project_id)s
AND error_id = %(error_id)s
ORDER BY start_ts DESC
LIMIT 1;""",
{"project_id": project_id, "error_id": error_id, "user_id": user_id})
cur.execute(query=query)
status = cur.fetchone()
if status is not None:
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
row["status"] = status.pop("status")
row["parent_error_id"] = status.pop("parent_error_id")
row["favorite"] = status.pop("favorite")
row["viewed"] = status.pop("viewed")
row["last_hydrated_session"] = status
else:
row["stack"] = []
row["last_hydrated_session"] = None
row["status"] = "untracked"
row["parent_error_id"] = None
row["favorite"] = False
row["viewed"] = False
return {"data": helper.dict_to_camel_case(row)}
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.MOBILE:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.DESKTOP:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
schemas.ErrorSort.OCCURRENCE: "max_datetime",
schemas.ErrorSort.USERS_COUNT: "users",
schemas.ErrorSort.SESSIONS_COUNT: "sessions"
}.get(key, 'max_datetime')
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
empty_response = {
'total': 0,
'errors': []
}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
# To ignore Script error
pg_sub_query.append("pe.message!='Script error.'")
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
if platform:
pg_sub_query_chart += ["start_ts>=%(startDate)s", "start_ts<%(endDate)s", "project_id=%(project_id)s"]
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.startTimestamp is None:
data.startTimestamp = TimeUTC.now(-30)
if data.endTimestamp is None:
data.endTimestamp = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = schemas.SortOrderType.DESC
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data.startTimestamp,
"endDate": data.endTimestamp,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.ALL:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
# if data.bookmarked:
# pg_sub_query.append("ufe.user_id = %(userId)s")
# extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator.CONTAINS)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
sessions,
last_occurrence,
first_occurrence,
chart
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
{"INNER JOIN public.sessions USING(session_id)" if platform else ""}
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if total == 0:
rows = []
else:
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["viewed"] = statuses[r["error_id"]]["viewed"]
else:
r["viewed"] = False
return {
'total': total,
'errors': helper.list_to_camel_case(rows)
}
def __save_stacktrace(error_id, data):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
WHERE error_id = %(error_id)s;""",
{"error_id": error_id, "data": json.dumps(data)})
cur.execute(query=query)
def get_trace(project_id, error_id):
error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":
return {"errors": ["this source of errors doesn't have a sourcemap"]}
if error.get("payload") is None:
return {"errors": ["null payload"]}
if error.get("stacktrace") is not None:
return {"sourcemapUploaded": True,
"trace": error.get("stacktrace"),
"preparsed": True}
trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
if all_exists:
__save_stacktrace(error_id=error_id, data=trace)
return {"sourcemapUploaded": all_exists,
"trace": trace,
"preparsed": False}
def get_sessions(start_date, end_date, project_id, user_id, error_id):
extra_constraints = ["s.project_id = %(project_id)s",
"s.start_ts >= %(startDate)s",
"s.start_ts <= %(endDate)s",
"e.error_id = %(error_id)s"]
if start_date is None:
start_date = TimeUTC.now(-7)
if end_date is None:
end_date = TimeUTC.now()
params = {
"startDate": start_date,
"endDate": end_date,
"project_id": project_id,
"userId": user_id,
"error_id": error_id}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
s.issue_types,
COALESCE((SELECT TRUE
FROM public.user_favorite_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
WHERE {" AND ".join(extra_constraints)}
ORDER BY s.start_ts DESC;""",
params)
cur.execute(query=query)
sessions_list = []
total = cur.rowcount
row = cur.fetchone()
while row is not None and len(sessions_list) < 100:
sessions_list.append(row)
row = cur.fetchone()
return {
'total': total,
'sessions': helper.list_to_camel_case(sessions_list)
}
ACTION_STATE = {
"unsolve": 'unresolved',
"solve": 'resolved',
"ignore": 'ignored'
}
def change_state(project_id, user_id, error_id, action):
errors = get(error_id, family=True)
print(len(errors))
status = ACTION_STATE.get(action)
if errors is None or len(errors) == 0:
return {"errors": ["error not found"]}
if errors[0]["status"] == status:
return {"errors": [f"error is already {status}"]}
if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}
params = {
"userId": user_id,
"error_ids": tuple([e["errorId"] for e in errors]),
"status": status}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET status = %(status)s
WHERE error_id IN %(error_ids)s
RETURNING status""",
params)
cur.execute(query=query)
row = cur.fetchone()
if row is not None:
for e in errors:
e["status"] = row["status"]
return {"data": errors}

View file

@@ -0,0 +1,14 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental error search")
from . import errors as errors_legacy
from . import errors_ch as errors
else:
from . import errors
from . import errors_viewed_ee as errors_viewed

View file

@@ -1,13 +1,14 @@
import logging
from decouple import config
from chalicelib.core.errors.errors_viewed import *
from chalicelib.utils import ch_client, exp_ch_helper
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
_add_viewed_error = add_viewed_error
logger = logging.getLogger(__name__)
def add_viewed_error(project_id, user_id, error_id):
_add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_errors_table()}(project_id,user_id, error_id)
VALUES (%(project_id)s,%(userId)s,%(error_id)s);"""
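
The override above follows a wrap-and-shadow pattern: star-import the community module, keep a handle on the original function, then redefine it to dual-write. A condensed sketch of just that mechanism (the ClickHouse execute call is elided in the hunk):

# Condensed sketch, not the full module: the rebinding order is what matters.
from chalicelib.core.errors.errors_viewed import *  # brings in the PG-only add_viewed_error

_add_viewed_error = add_viewed_error  # capture the original before shadowing it

def add_viewed_error(project_id, user_id, error_id):
    # PostgreSQL write via the legacy implementation...
    _add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
    # ...followed by the mirrored ClickHouse INSERT shown in the hunk above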

View file

@@ -1,39 +0,0 @@
from chalicelib.utils import pg_client
from chalicelib.core import errors_viewed_exp
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
errors_viewed_exp.add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE ve.error_id = %(error_id)s
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
query=query
)
r = cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)

View file

@@ -1,223 +0,0 @@
from typing import Optional
from decouple import config
import schemas
from chalicelib.core import issues
from chalicelib.core.sessions import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
from . import autocomplete_exp as autocomplete
else:
from . import autocomplete as autocomplete
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CUSTOM' AS type
FROM events_common.customs AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
return helper.dict_to_camel_case(rows)
def __merge_cells(rows, start, count, replacement):
rows[start] = replacement
rows = rows[:start + 1] + rows[start + count:]
return rows
def __get_grouped_clickrage(rows, session_id, project_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
if len(click_rage_issues) == 0:
return rows
for c in click_rage_issues:
merge_count = c.get("payload")
if merge_count is not None:
merge_count = merge_count.get("Count", 3)
else:
merge_count = 3
for i in range(len(rows)):
if rows[i]["timestamp"] == c["timestamp"]:
rows = __merge_cells(rows=rows,
start=i,
count=merge_count,
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
break
return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
rows = []
if event_type is None or event_type == schemas.EventType.CLICK:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.INPUT:
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.LOCATION:
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows
def _search_tags(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
query = f"""
SELECT public.tags.name
'TAG' AS type
FROM public.tags
WHERE public.tags.project_id = %(project_id)s
ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
LIMIT 10
"""
query = cur.mogrify(query, {'project_id': project_id, 'value': value})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
class EventType:
CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
column=None) # column=None because errors are searched by name or message
METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
# MOBILE
CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
SUPPORTED_TYPES = {
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
query=autocomplete.__generic_query(
typename=EventType.LOCATION.ui_type)),
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
query=autocomplete.__generic_query(
typename=EventType.REQUEST.ui_type)),
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
query=autocomplete.__generic_query(
typename=EventType.GRAPHQL.ui_type)),
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
query=autocomplete.__generic_query(
typename=EventType.STATEACTION.ui_type)),
EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None),
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
query=None),
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
query=None),
# IOS
EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.CLICK_MOBILE.ui_type)),
EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.INPUT_MOBILE.ui_type)),
EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.VIEW_MOBILE.ui_type)),
EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM_MOBILE.ui_type)),
EventType.REQUEST_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.REQUEST_MOBILE.ui_type)),
EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile,
query=None),
}
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for MOBILE events autocomplete
# if event_type + "_MOBILE" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_MOBILE" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_MOBILE") \
and event_type[:-len("_MOBILE")] in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
else:
return {"errors": ["unsupported event"]}
return {"data": rows}

View file

@ -1,67 +0,0 @@
import schemas
from chalicelib.utils import pg_client
def get_global_integrations_status(tenant_id, user_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s AND provider='dynatrace')) AS {schemas.IntegrationType.DYNATRACE.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()
result = []
for k in current_integrations.keys():
result.append({"name": k, "integrated": current_integrations[k]})
return result
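
Each EXISTS sub-select becomes one boolean column, so the function returns a flat list with one entry per provider; an illustrative result (values are made up):

# [
#     {"name": "github", "integrated": True},
#     {"name": "jira", "integrated": False},
#     {"name": "datadog", "integrated": True},
#     ...
# ]
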

View file

@ -1 +0,0 @@
TENANT_CONDITION = "tenant_id=%(tenant_id)s"

View file

@ -0,0 +1,9 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
from chalicelib.core.metrics import heatmaps_ch as heatmaps
from chalicelib.core.metrics import metrics_ch as metrics
from chalicelib.core.metrics import custom_metrics_ee as custom_metrics

View file

@ -0,0 +1,99 @@
import json
import logging
from decouple import config
from chalicelib.utils.storage import extra
from chalicelib.core.sessions import sessions_mobs, sessions_favorite
from .custom_metrics import *
def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
session_data = {"sessionId": data.session_id}
else:
session_data = __get_heat_map_chart(project=project, user_id=user_id,
data=data, include_mobs=False)
if session_data is not None:
session_data = {"sessionId": session_data["sessionId"]}
if session_data is not None:
# for EE only
keys = sessions_mobs. \
__get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"])
keys += sessions_mobs. \
__get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions
tag = config('RETENTION_L_VALUE', default='vault')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
logger.error(str(e))
_data = {"session_data": json.dumps(session_data) if session_data is not None else None}
for i, s in enumerate(data.series):
for k in s.model_dump().keys():
_data[f"{k}_{i}"] = s.__getattribute__(k)
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
"default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail, data,
card_info)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
%(card_info)s)
RETURNING metric_id"""
if len(data.series) > 0:
query = f"""WITH m AS ({query})
INSERT INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;"""
query = cur.mogrify(query, params)
cur.execute(query)
r = cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)}
def delete_card(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND metric_id = %(metric_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING data;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
)
# for EE only
row = cur.fetchone()
if row:
if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]):
keys = sessions_mobs. \
__get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"])
keys += sessions_mobs. \
__get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions
tag = config('RETENTION_D_VALUE', default='default')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
logger.error(str(e))
return {"state": "success"}

View file

@ -1 +1,2 @@
TENANT_CONDITION = "tenant_id = %(tenant_id)s"
MOB_KEY="encode(file_key,'hex') AS file_key,"

View file

@ -1,8 +1,8 @@
from typing import List
import schemas
from chalicelib.core.metrics_ch import __get_basic_constraints, __get_meta_constraint
from chalicelib.core.metrics_ch import __get_constraint_values, __complete_missing_steps
from chalicelib.core.metrics.metrics_ch import __get_basic_constraints, __get_meta_constraint, __get_constraint_values, \
__complete_missing_steps
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import helper, dev
from chalicelib.utils.TimeUTC import TimeUTC

View file

@ -10,3 +10,7 @@ if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from . import sessions_ch as sessions
else:
from . import sessions
from chalicelib.core.sessions import sessions_devtool_ee as sessions_devtool
from chalicelib.core.sessions import sessions_viewed_ee as sessions_viewed
from chalicelib.core.sessions import sessions_favorite_ee as sessions_favorite

View file

@ -1,39 +0,0 @@
from decouple import config
from fastapi.security import SecurityScopes
import schemas
from chalicelib.core import permissions
from chalicelib.utils.storage import StorageClient
SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])
def __get_devtools_keys(project_id, session_id):
params = {
"sessionId": session_id,
"projectId": project_id
}
return [
config("DEVTOOLS_MOB_PATTERN", default="%(sessionId)sdevtools") % params
]
def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
if not permissions.check(security_scopes=SCOPES, context=context):
return []
results = []
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k):
continue
results.append(StorageClient.get_presigned_url_for_sharing(
bucket=config("sessions_bucket"),
expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
key=k
))
return results
def delete_mobs(project_id, session_ids):
for session_id in session_ids:
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -0,0 +1,13 @@
from fastapi.security import SecurityScopes
from chalicelib.core import permissions
from chalicelib.core.sessions.sessions_devtool import *
_get_urls = get_urls
SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])
def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
if not permissions.check(security_scopes=SCOPES, context=context):
return []
return _get_urls(session_id=session_id, project_id=project_id, context=context, check_existence=check_existence)
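
The EE module keeps the imported implementation under a private alias (_get_urls) before shadowing the name, so existing callers pick up the permission gate without changing their imports. A self-contained sketch of the same wrap-and-shadow pattern, with hypothetical names:

# Hypothetical names; only the pattern mirrors the module above.
def get_data(key):
    return {"key": key}

_get_data = get_data  # keep a handle on the base implementation

def get_data(key, allowed=True):  # shadows the original name
    if not allowed:
        return []
    return _get_data(key=key)
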

View file

@ -1,97 +0,0 @@
import schemas
from chalicelib.core import sessions_favorite_exp, sessions_mobs, sessions_devtool
from chalicelib.utils import pg_client
from chalicelib.utils.storage import extra
from decouple import config
def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions(user_id, session_id)
VALUES (%(userId)s,%(session_id)s)
RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id})
)
row = cur.fetchone()
if row:
sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]}
def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions
WHERE user_id = %(userId)s
AND session_id = %(session_id)s
RETURNING session_id;""",
{"userId": context.user_id, "session_id": session_id})
)
row = cur.fetchone()
if row:
sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
return {"data": {"sessionId": session_id}}
return {"errors": ["something went wrong"]}
def favorite_session(context: schemas.CurrentContext, project_id, session_id):
keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions
keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)
if favorite_session_exists(user_id=context.user_id, session_id=session_id):
tag = config('RETENTION_D_VALUE', default='default')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
print(f"!!!Error while tagging: {k} to {tag} for removal")
print(str(e))
return remove_favorite_session(context=context, project_id=project_id, session_id=session_id)
tag = config('RETENTION_L_VALUE', default='vault')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
print(f"!!!Error while tagging: {k} to {tag} for vault")
print(str(e))
return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
def favorite_session_exists(session_id, user_id=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT session_id
FROM public.user_favorite_sessions
WHERE
session_id = %(session_id)s
{'AND user_id = %(userId)s' if user_id else ''};""",
{"userId": user_id, "session_id": session_id})
)
r = cur.fetchone()
return r is not None
def get_start_end_timestamp(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
WHERE
user_favorite_sessions.user_id = %(userId)s
AND project_id = %(project_id)s;""",
{"userId": user_id, "project_id": project_id})
)
r = cur.fetchone()
return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])

View file

@ -0,0 +1,75 @@
import logging
from decouple import config
from chalicelib.utils import ch_client, exp_ch_helper
logger = logging.getLogger(__name__)
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.core.sessions.sessions_favorite import *
from chalicelib.utils.storage import extra
_add_favorite_session = add_favorite_session
_remove_favorite_session = remove_favorite_session
def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
result = _add_favorite_session(context=context, project_id=project_id, session_id=session_id)
if "data" in result:
add_favorite_session_to_ch(project_id=project_id, user_id=context.user_id,
session_id=session_id)
return result
def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
result = _remove_favorite_session(context=context, project_id=project_id, session_id=session_id)
if "data" in result:
remove_favorite_session_from_ch(project_id=project_id, user_id=context.user_id,
session_id=session_id)
return result
def favorite_session(context: schemas.CurrentContext, project_id, session_id):
keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions
keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)
if favorite_session_exists(user_id=context.user_id, session_id=session_id):
tag = config('RETENTION_D_VALUE', default='default')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
print(f"!!!Error while tagging: {k} to {tag} for removal")
print(str(e))
return remove_favorite_session(context=context, project_id=project_id, session_id=session_id)
tag = config('RETENTION_L_VALUE', default='vault')
for k in keys:
try:
extra.tag_session(file_key=k, tag_value=tag)
except Exception as e:
print(f"!!!Error while tagging: {k} to {tag} for vault")
print(str(e))
return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
def add_favorite_session_to_ch(project_id, user_id, session_id, sign=1):
try:
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
cur.execute(query=query, params=params)
except Exception as err:
logger.error("------- Exception while adding favorite session to CH")
logger.error(err)
def remove_favorite_session_from_ch(project_id, user_id, session_id):
add_favorite_session_to_ch(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1)
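
Writing sign = 1 on add and sign = -1 on remove matches ClickHouse's CollapsingMergeTree convention, where paired rows cancel out during merges; that engine choice is an inference from the insert shape, not something this diff shows. Reads over such a table typically compensate for rows that have not collapsed yet, e.g.:

# Illustrative read pattern for a CollapsingMergeTree-backed table:
# SELECT session_id
# FROM user_favorite_sessions
# WHERE project_id = %(project_id)s AND user_id = %(userId)s
# GROUP BY session_id
# HAVING sum(sign) > 0;
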

View file

@ -1,24 +0,0 @@
import logging
from decouple import config
from chalicelib.utils import ch_client, exp_ch_helper
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
def add_favorite_session(project_id, user_id, session_id, sign=1):
try:
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
cur.execute(query=query, params=params)
except Exception as err:
logging.error("------- Exception while adding favorite session to CH")
logging.error(err)
def remove_favorite_session(project_id, user_id, session_id):
add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1)

View file

@ -4,8 +4,8 @@ from urllib.parse import urljoin
from decouple import config
import schemas
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC

View file

@ -1,157 +0,0 @@
import schemas
from chalicelib.core import events, metadata, events_mobile, \
sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
def __is_mobile_session(platform):
return platform in ('ios', 'android')
def __group_metadata(session, project_metadata):
meta = {}
for m in project_metadata.keys():
if project_metadata[m] is not None and session.get(m) is not None:
meta[project_metadata[m]] = session[m]
session.pop(m)
return meta
def get_pre_replay(project_id, session_id):
return {
'domURL': [sessions_mobs.get_first_url(project_id=project_id, session_id=session_id, check_existence=False)]}
# This function should not use ClickHouse because it doesn't have `file_key`
def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
group_metadata=False, live=True):
with pg_client.PostgresClient() as cur:
extra_query = []
if include_fav_viewed:
extra_query.append("""COALESCE((SELECT TRUE
FROM public.user_favorite_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s), FALSE) AS favorite""")
extra_query.append("""COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s), FALSE) AS viewed""")
query = cur.mogrify(
f"""\
SELECT
s.*,
s.session_id::text AS session_id,
encode(file_key,'hex') AS file_key,
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
WHERE s.project_id = %(project_id)s
AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id, "userId": context.user_id}
)
cur.execute(query=query)
data = cur.fetchone()
if data is not None:
data = helper.dict_to_camel_case(data)
if full_data:
if __is_mobile_session(data["platform"]):
data['mobsUrl'] = []
data['videoURL'] = sessions_mobs.get_mobile_videos(session_id=session_id, project_id=project_id,
check_existence=False)
else:
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
# for EE
# context is required to check if the user has the right to access devtools
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
context=context, check_existence=False)
data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
project_id=project_id,
check_existence=False)
else:
data['utxVideo'] = []
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
check_existence=False)
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
project_key=data["projectKey"])
data["inDB"] = True
return data
elif live:
return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
else:
return None
def get_events(project_id, session_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT session_id, platform, start_ts, duration
FROM public.sessions AS s
WHERE s.project_id = %(project_id)s
AND s.session_id = %(session_id)s;""",
{"project_id": project_id, "session_id": session_id}
)
cur.execute(query=query)
s_data = cur.fetchone()
if s_data is not None:
s_data = helper.dict_to_camel_case(s_data)
data = {}
if __is_mobile_session(s_data["platform"]):
data['events'] = events_mobile.get_by_sessionId(project_id=project_id, session_id=session_id)
for e in data['events']:
if e["type"].endswith("_IOS"):
e["type"] = e["type"][:-len("_IOS")]
elif e["type"].endswith("_MOBILE"):
e["type"] = e["type"][:-len("_MOBILE")]
data['crashes'] = events_mobile.get_crashes_by_session_id(session_id=session_id)
data['userEvents'] = events_mobile.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['userTesting'] = []
else:
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
# limit the number of errors to reduce the response-body size
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
data['issues'] = reduce_issues(data['issues'])
return data
else:
return None
# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
if issues_list is None:
return None
i = 0
# remove same-type issues if the time between them is <2s
while i < len(issues_list) - 1:
for j in range(i + 1, len(issues_list)):
if issues_list[i]["type"] == issues_list[j]["type"]:
break
else:
i += 1
break
if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
issues_list.pop(j)
else:
i += 1
return issues_list
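
A short trace of reduce_issues, assuming the list arrives ordered by ascending timestamp (values are made up); note that with ascending timestamps the difference at position i is negative, so any later same-type issue found from i is dropped:

# Illustrative trace; issue types and timestamps are invented.
issues = [
    {"type": "click_rage", "timestamp": 1000},
    {"type": "click_rage", "timestamp": 1800},
    {"type": "dead_click", "timestamp": 2500},
]
# i=0 finds a same-type issue at j=1; 1000 - 1800 = -800 < 2000, so j=1 is popped.
# The next pass finds no same-type successor for i=0, so the loop exits early.
print(reduce_issues(issues))
# -> [{'type': 'click_rage', 'timestamp': 1000},
#     {'type': 'dead_click', 'timestamp': 2500}]
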

View file

@ -1,13 +0,0 @@
from chalicelib.core import sessions_viewed_exp
from chalicelib.utils import pg_client
def view_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_sessions (user_id, session_id)
VALUES (%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
sessions_viewed_exp.view_session(project_id=project_id, user_id=user_id, session_id=session_id)

View file

@ -1,11 +1,15 @@
from chalicelib.utils import ch_client, exp_ch_helper
import logging
from decouple import config
from chalicelib.core.sessions.sessions_viewed import *
_view_session = view_session
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
def view_session(project_id, user_id, session_id):
_view_session(project_id=project_id, user_id=user_id, session_id=session_id)
try:
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_sessions_table()}(project_id, user_id, session_id)

View file

@ -12,28 +12,35 @@ rm -rf ./chalicelib/core/authorizers.py
rm -rf ./chalicelib/core/autocomplete
rm -rf ./chalicelib/core/collaborations
rm -rf ./chalicelib/core/countries.py
rm -rf ./chalicelib/core/metrics.py
rm -rf ./chalicelib/core/custom_metrics.py
rm -rf ./chalicelib/core/custom_metrics_predefined.py
rm -rf ./chalicelib/core/dashboards.py
rm -rf ./chalicelib/core/errors_favorite.py
rm -rf ./chalicelib/core/metrics/metrics.py
rm -rf ./chalicelib/core/metrics/custom_metrics.py
rm -rf ./chalicelib/core/metrics/custom_metrics_predefined.py
rm -rf ./chalicelib/core/metrics/funnels.py
rm -rf ./chalicelib/core/metrics/dashboards.py
rm -rf ./chalicelib/core/metrics/heatmaps.py
rm -rf ./chalicelib/core/metrics/heatmaps_ch.py
rm -rf ./chalicelib/core/metrics/metrics_ch.py
rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_mobile.py
rm -rf ./chalicelib/core/feature_flags.py
rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/issue_tracking/*.py
rm -rf ./chalicelib/core/issue_tracking
rm -rf ./chalicelib/core/integrations_manager.py
rm -rf ./chalicelib/core/issues.py
rm -rf ./chalicelib/core/jobs.py
rm -rf ./chalicelib/core/log_tools/*.py
rm -rf ./chalicelib/core/log_tools
rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/saved_search.py
rm -rf ./chalicelib/core/sessions/sessions.py
rm -rf ./chalicelib/core/sessions/sessions_ch.py
rm -rf ./chalicelib/core/sessions/sessions_devtool.py
rm -rf ./chalicelib/core/sessions/sessions_favorite.py
rm -rf ./chalicelib/core/sessions/sessions_assignments.py
rm -rf ./chalicelib/core/sessions/sessions_metas.py
rm -rf ./chalicelib/core/sessions/sessions_mobs.py
rm -rf ./chalicelib/core/sessions/sessions_replay.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/significance.py
rm -rf ./chalicelib/core/socket_ios.py
@ -44,6 +51,7 @@ rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py
rm -rf ./chalicelib/core/boarding.py
rm -rf ./chalicelib/core/canvas.py
rm -rf ./chalicelib/utils/captcha.py
rm -rf ./chalicelib/utils/dev.py
@ -96,3 +104,8 @@ rm -rf ./chalicelib/core/alerts/alerts_processor.py
rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py
rm -rf ./chalicelib/core/alerts/alerts_listener.py
rm -rf ./chalicelib/core/alerts/modules/helpers.py
rm -rf ./chalicelib/core/errors/modules
rm -rf ./chalicelib/core/errors/errors.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_favorite.py
rm -rf ./chalicelib/core/errors/errors_viewed.py

View file

@ -8,7 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import scope
from chalicelib.core import assist, heatmaps, errors, errors_viewed, errors_favorite, signup, feature_flags
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.errors import errors, errors_viewed, errors_favorite
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
sessions_viewed, unprocessed_sessions
from chalicelib.core import tenants, users, projects, license

View file

@ -1,7 +1,7 @@
from typing import Union
import schemas
from chalicelib.core import dashboards, custom_metrics
from chalicelib.core.metrics import dashboards, custom_metrics
from fastapi import Body, Depends
from or_dependencies import OR_context, OR_scope
from routers.base import get_routers