Merge branch 'dev' into product-analytics-go

commit 5640913e68
245 changed files with 4504 additions and 4695 deletions

api/Pipfile (21 lines changed)

@@ -4,21 +4,24 @@ verify_ssl = true
 name = "pypi"

 [packages]
-urllib3 = "==1.26.16"
+sqlparse = "==0.5.2"
+urllib3 = "==2.2.3"
 requests = "==2.32.3"
-boto3 = "==1.35.60"
-pyjwt = "==2.9.0"
+boto3 = "==1.35.76"
+pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
+psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
 clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
 clickhouse-connect = "==0.8.9"
 elasticsearch = "==8.16.0"
 jira = "==3.8.0"
 cachetools = "==5.5.0"
-fastapi = "==0.115.5"
-uvicorn = {extras = ["standard"], version = "==0.32.0"}
+fastapi = "==0.115.6"
+uvicorn = {extras = ["standard"], version = "==0.32.1"}
 python-decouple = "==3.8"
-pydantic = {extras = ["email"], version = "==2.9.2"}
-apscheduler = "==3.10.4"
-redis = "==5.2.0"
+pydantic = {extras = ["email"], version = "==2.10.3"}
+apscheduler = "==3.11.0"
+redis = "==5.2.1"

 [dev-packages]

api/app.py (10 lines changed)

@@ -13,17 +13,16 @@ from psycopg.rows import dict_row
 from starlette.responses import StreamingResponse

 from chalicelib.utils import helper
-from chalicelib.utils import pg_client
+from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, core_dynamic_crons
 from routers import core, core_dynamic
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics

 loglevel = config("LOGLEVEL", default=logging.WARNING)
 print(f">Loglevel set to: {loglevel}")
 logging.basicConfig(level=loglevel)


 class ORPYAsyncConnection(AsyncConnection):

     def __init__(self, *args, **kwargs):

@@ -39,6 +38,7 @@ async def lifespan(app: FastAPI):
     app.schedule = AsyncIOScheduler()
     await pg_client.init()
+    await ch_client.init()
     app.schedule.start()

     for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:

@@ -128,3 +128,7 @@ app.include_router(usability_tests.app_apikey)
 app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)
+
+app.include_router(product_anaytics.public_app)
+app.include_router(product_anaytics.app)
+app.include_router(product_anaytics.app_apikey)

@@ -5,7 +5,7 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from decouple import config
 from fastapi import FastAPI

-from chalicelib.core import alerts_processor
+from chalicelib.core.alerts import alerts_processor
 from chalicelib.utils import pg_client

@@ -45,8 +45,6 @@ class JWTAuth(HTTPBearer):
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                 detail="Invalid authentication scheme.")
         jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
-        logger.info("------ jwt_payload ------")
-        logger.info(jwt_payload)
         auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                                     jwt_iat=jwt_payload.get("iat", 100))
         if jwt_payload is None \

@@ -120,8 +118,7 @@ class JWTAuth(HTTPBearer):
             jwt_payload = None
         else:
             jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=request.cookies["spotRefreshToken"])
-            logger.info("__process_spot_refresh_call")
-            logger.info(jwt_payload)

         if jwt_payload is None or jwt_payload.get("jti") is None:
             logger.warning("Null spotRefreshToken's payload, or null JTI.")
             raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,

api/chalicelib/core/alerts/__init__.py (new file, 10 lines)

@@ -0,0 +1,10 @@
+import logging
+
+from decouple import config
+
+logger = logging.getLogger(__name__)
+if config("EXP_ALERTS", cast=bool, default=False):
+    logging.info(">>> Using experimental alerts")
+    from . import alerts_processor_ch as alerts_processor
+else:
+    from . import alerts_processor as alerts_processor
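
Note: the package __init__ above re-exports whichever processor the EXP_ALERTS flag selects, so callers never import a backend module directly. A minimal sketch of the consuming side, using the same import path the cron wiring in this commit uses (the wrapper function name is only illustrative):

    from chalicelib.core.alerts import alerts_processor

    def run_alerts_job():
        # resolves to alerts_processor_ch.process() when EXP_ALERTS=true,
        # otherwise to the PostgreSQL-based alerts_processor.process()
        alerts_processor.process()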

@@ -7,8 +7,8 @@ from decouple import config
 import schemas
 from chalicelib.core import notifications, webhook
-from chalicelib.core.collaboration_msteams import MSTeams
-from chalicelib.core.collaboration_slack import Slack
+from chalicelib.core.collaborations.collaboration_msteams import MSTeams
+from chalicelib.core.collaborations.collaboration_slack import Slack
 from chalicelib.utils import pg_client, helper, email_helper, smtp
 from chalicelib.utils.TimeUTC import TimeUTC

@@ -1,9 +1,10 @@
+from chalicelib.core.alerts.modules import TENANT_ID
 from chalicelib.utils import pg_client, helper


 def get_all_alerts():
     with pg_client.PostgresClient(long_query=True) as cur:
-        query = """SELECT -1 AS tenant_id,
+        query = f"""SELECT {TENANT_ID} AS tenant_id,
                           alert_id,
                           projects.project_id,
                           projects.name AS project_name,

@@ -1,16 +1,15 @@
 import decimal
 import logging

 from pydantic_core._pydantic_core import ValidationError

 import schemas
-from chalicelib.core import alerts
-from chalicelib.core import alerts_listener
-from chalicelib.core import sessions
+from chalicelib.core.alerts import alerts, alerts_listener
+from chalicelib.core.alerts.modules import sessions, alert_helpers
 from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC

 logger = logging.getLogger(__name__)

 LeftToDb = {
     schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",

@@ -46,35 +45,6 @@ LeftToDb = {
         "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
 }

-# This is the frequency of execution for each threshold
-TimeInterval = {
-    15: 3,
-    30: 5,
-    60: 10,
-    120: 20,
-    240: 30,
-    1440: 60,
-}
-
-
-def can_check(a) -> bool:
-    now = TimeUTC.now()
-
-    repetitionBase = a["options"]["currentPeriod"] \
-        if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
-           and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
-        else a["options"]["previousPeriod"]
-
-    if TimeInterval.get(repetitionBase) is None:
-        logger.error(f"repetitionBase: {repetitionBase} NOT FOUND")
-        return False
-
-    return (a["options"]["renotifyInterval"] <= 0 or
-            a["options"].get("lastNotification") is None or
-            a["options"]["lastNotification"] <= 0 or
-            ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
-        and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000


 def Build(a):
     now = TimeUTC.now()

@@ -165,7 +135,7 @@ def process():
     all_alerts = alerts_listener.get_all_alerts()
     with pg_client.PostgresClient() as cur:
         for alert in all_alerts:
-            if can_check(alert):
+            if alert_helpers.can_check(alert):
                 query, params = Build(alert)
                 try:
                     query = cur.mogrify(query, params)

@@ -181,7 +151,7 @@ def process():
                     result = cur.fetchone()
                     if result["valid"]:
                         logger.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
-                        notifications.append(generate_notification(alert, result))
+                        notifications.append(alert_helpers.generate_notification(alert, result))
                 except Exception as e:
                     logger.error(
                         f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")

@@ -195,42 +165,3 @@ def process():
                                WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
     if len(notifications) > 0:
         alerts.process_notifications(notifications)
-
-
-def __format_value(x):
-    if x % 1 == 0:
-        x = int(x)
-    else:
-        x = round(x, 2)
-    return f"{x:,}"
-
-
-def generate_notification(alert, result):
-    left = __format_value(result['value'])
-    right = __format_value(alert['query']['right'])
-    return {
-        "alertId": alert["alertId"],
-        "tenantId": alert["tenantId"],
-        "title": alert["name"],
-        "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).",
-        "buttonText": "Check metrics for more details",
-        "buttonUrl": f"/{alert['projectId']}/metrics",
-        "imageUrl": None,
-        "projectId": alert["projectId"],
-        "projectName": alert["projectName"],
-        "options": {"source": "ALERT", "sourceId": alert["alertId"],
-                    "sourceMeta": alert["detectionMethod"],
-                    "message": alert["options"]["message"], "projectId": alert["projectId"],
-                    "data": {"title": alert["name"],
-                             "limitValue": alert["query"]["right"],
-                             "actualValue": float(result["value"]) \
-                                 if isinstance(result["value"], decimal.Decimal) \
-                                 else result["value"],
-                             "operator": alert["query"]["operator"],
-                             "trigger": alert["query"]["left"],
-                             "alertId": alert["alertId"],
-                             "detectionMethod": alert["detectionMethod"],
-                             "currentPeriod": alert["options"]["currentPeriod"],
-                             "previousPeriod": alert["options"]["previousPeriod"],
-                             "createdAt": TimeUTC.now()}},
-    }

@@ -3,9 +3,8 @@ import logging
 from pydantic_core._pydantic_core import ValidationError

 import schemas
-from chalicelib.core import alerts
-from chalicelib.core import alerts_listener, alerts_processor
-from chalicelib.core import sessions_exp as sessions
+from chalicelib.core.alerts import alerts, alerts_listener
+from chalicelib.core.alerts.modules import sessions, alert_helpers
 from chalicelib.utils import pg_client, ch_client, exp_ch_helper
 from chalicelib.utils.TimeUTC import TimeUTC

@@ -162,7 +161,7 @@ def process():
         for alert in all_alerts:
             if alert["query"]["left"] != "CUSTOM":
                 continue
-            if alerts_processor.can_check(alert):
+            if alert_helpers.can_check(alert):
                 query, params = Build(alert)
                 try:
                     query = ch_cur.format(query, params)

@@ -180,7 +179,7 @@ def process():

                     if result["valid"]:
                         logger.info("Valid alert, notifying users")
-                        notifications.append(alerts_processor.generate_notification(alert, result))
+                        notifications.append(alert_helpers.generate_notification(alert, result))
                 except Exception as e:
                     logger.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
                     logger.error(str(e))

api/chalicelib/core/alerts/modules/__init__.py (new file, 9 lines)

@@ -0,0 +1,9 @@
+from decouple import config
+
+TENANT_ID = "-1"
+if config("EXP_ALERTS", cast=bool, default=False):
+    from chalicelib.core.sessions import sessions_ch as sessions
+else:
+    from chalicelib.core.sessions import sessions
+
+from . import helpers as alert_helpers

api/chalicelib/core/alerts/modules/helpers.py (new file, 74 lines)

@@ -0,0 +1,74 @@
+import decimal
+import logging
+
+import schemas
+from chalicelib.utils.TimeUTC import TimeUTC
+
+logger = logging.getLogger(__name__)
+# This is the frequency of execution for each threshold
+TimeInterval = {
+    15: 3,
+    30: 5,
+    60: 10,
+    120: 20,
+    240: 30,
+    1440: 60,
+}
+
+
+def __format_value(x):
+    if x % 1 == 0:
+        x = int(x)
+    else:
+        x = round(x, 2)
+    return f"{x:,}"
+
+
+def can_check(a) -> bool:
+    now = TimeUTC.now()
+
+    repetitionBase = a["options"]["currentPeriod"] \
+        if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
+           and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
+        else a["options"]["previousPeriod"]
+
+    if TimeInterval.get(repetitionBase) is None:
+        logger.error(f"repetitionBase: {repetitionBase} NOT FOUND")
+        return False
+
+    return (a["options"]["renotifyInterval"] <= 0 or
+            a["options"].get("lastNotification") is None or
+            a["options"]["lastNotification"] <= 0 or
+            ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
+        and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
+
+
+def generate_notification(alert, result):
+    left = __format_value(result['value'])
+    right = __format_value(alert['query']['right'])
+    return {
+        "alertId": alert["alertId"],
+        "tenantId": alert["tenantId"],
+        "title": alert["name"],
+        "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).",
+        "buttonText": "Check metrics for more details",
+        "buttonUrl": f"/{alert['projectId']}/metrics",
+        "imageUrl": None,
+        "projectId": alert["projectId"],
+        "projectName": alert["projectName"],
+        "options": {"source": "ALERT", "sourceId": alert["alertId"],
+                    "sourceMeta": alert["detectionMethod"],
+                    "message": alert["options"]["message"], "projectId": alert["projectId"],
+                    "data": {"title": alert["name"],
+                             "limitValue": alert["query"]["right"],
+                             "actualValue": float(result["value"]) \
+                                 if isinstance(result["value"], decimal.Decimal) \
+                                 else result["value"],
+                             "operator": alert["query"]["operator"],
+                             "trigger": alert["query"]["left"],
+                             "alertId": alert["alertId"],
+                             "detectionMethod": alert["detectionMethod"],
+                             "currentPeriod": alert["options"]["currentPeriod"],
+                             "previousPeriod": alert["options"]["previousPeriod"],
+                             "createdAt": TimeUTC.now()}},
+    }
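
Note: can_check() expects the flattened alert row produced by get_all_alerts(); a minimal sketch with assumed field values (the keys are the ones the helper reads, the values are invented for illustration):

    from chalicelib.core.alerts.modules import helpers as alert_helpers
    from chalicelib.utils.TimeUTC import TimeUTC

    sample_alert = {
        "detectionMethod": "threshold",          # assumed value, not AlertDetectionMethod.CHANGE
        "createdAt": TimeUTC.now() - 3_600_000,  # created one hour ago (ms)
        "options": {"currentPeriod": 15,
                    "previousPeriod": 15,
                    "renotifyInterval": 30,      # minutes
                    "lastNotification": 0},
    }

    if alert_helpers.can_check(sample_alert):
        print("alert is due for evaluation")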

api/chalicelib/core/autocomplete/__init__.py (new file, 11 lines)

@@ -0,0 +1,11 @@
+import logging
+
+from decouple import config
+
+logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+
+if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
+    logging.info(">>> Using experimental autocomplete")
+    from . import autocomplete_ch as autocomplete
+else:
+    from . import autocomplete

@@ -61,11 +61,11 @@ def __get_autocomplete_table(value, project_id):
         try:
             cur.execute(query)
         except Exception as err:
-            print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
-            print(query.decode('UTF-8'))
-            print("--------- VALUE -----------")
-            print(value)
-            print("--------------------")
+            logger.exception("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
+            logger.exception(query.decode('UTF-8'))
+            logger.exception("--------- VALUE -----------")
+            logger.exception(value)
+            logger.exception("--------------------")
             raise err
         results = cur.fetchall()
         for r in results:

@@ -1,3 +1,4 @@
+import logging
 import schemas
 from chalicelib.core import countries, events, metadata
 from chalicelib.utils import ch_client

@@ -5,6 +6,7 @@ from chalicelib.utils import helper, exp_ch_helper
 from chalicelib.utils.event_filter_definition import Event
 from chalicelib.utils.or_cache import CachedResponse

+logger = logging.getLogger(__name__)
 TABLE = "experimental.autocomplete"

@@ -59,13 +61,13 @@ def __get_autocomplete_table(value, project_id):
         try:
             results = cur.execute(query=query, params=params)
         except Exception as err:
-            print("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
-            print(cur.format(query=query, params=params))
-            print("--------- PARAMS -----------")
-            print(params)
-            print("--------- VALUE -----------")
-            print(value)
-            print("--------------------")
+            logger.exception("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
+            logger.exception(cur.format(query=query, params=params))
+            logger.exception("--------- PARAMS -----------")
+            logger.exception(params)
+            logger.exception("--------- VALUE -----------")
+            logger.exception(value)
+            logger.exception("--------------------")
             raise err
         for r in results:
             r["type"] = r.pop("_type")

@@ -1,5 +1,6 @@
 from chalicelib.utils import pg_client
-from chalicelib.core import projects, log_tool_datadog, log_tool_stackdriver, log_tool_sentry
+from chalicelib.core import projects
+from chalicelib.core.log_tools import datadog, stackdriver, sentry

 from chalicelib.core import users

@@ -49,9 +50,9 @@ def get_state(tenant_id):
          "done": len(users.get_members(tenant_id=tenant_id)) > 1,
          "URL": "https://app.openreplay.com/client/manage-users"},
         {"task": "Integrations",
-         "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
-                 or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
-                 or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
+         "done": len(datadog.get_all(tenant_id=tenant_id)) > 0 \
+                 or len(sentry.get_all(tenant_id=tenant_id)) > 0 \
+                 or len(stackdriver.get_all(tenant_id=tenant_id)) > 0,
          "URL": "https://docs.openreplay.com/integrations"}
     ]

@@ -108,7 +109,7 @@ def get_state_manage_users(tenant_id):

 def get_state_integrations(tenant_id):
     return {"task": "Integrations",
-            "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \
-                    or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \
-                    or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
+            "done": len(datadog.get_all(tenant_id=tenant_id)) > 0 \
+                    or len(sentry.get_all(tenant_id=tenant_id)) > 0 \
+                    or len(stackdriver.get_all(tenant_id=tenant_id)) > 0,
            "URL": "https://docs.openreplay.com/integrations"}

api/chalicelib/core/collaborations/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
+from . import collaboration_base as _

@@ -6,7 +6,7 @@ from fastapi import HTTPException, status
 import schemas
 from chalicelib.core import webhook
-from chalicelib.core.collaboration_base import BaseCollaboration
+from chalicelib.core.collaborations.collaboration_base import BaseCollaboration

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ from fastapi import HTTPException, status
 import schemas
 from chalicelib.core import webhook
-from chalicelib.core.collaboration_base import BaseCollaboration
+from chalicelib.core.collaborations.collaboration_base import BaseCollaboration


 class Slack(BaseCollaboration):

@@ -4,28 +4,14 @@ import logging
 from fastapi import HTTPException, status

 import schemas
-from chalicelib.core import sessions, funnels, errors, issues, heatmaps, product_analytics, \
-    custom_metrics_predefined
+from chalicelib.core import funnels, errors, issues, heatmaps, product_analytics, custom_metrics_predefined
+from chalicelib.core.sessions import sessions
 from chalicelib.utils import helper, pg_client
 from chalicelib.utils.TimeUTC import TimeUTC

 logger = logging.getLogger(__name__)


-# TODO: refactor this to split
-# timeseries /
-# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
-# remove "table of" calls from this function
-def __try_live(project_id, data: schemas.CardSchema):
-    results = []
-    for i, s in enumerate(data.series):
-        results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
-                                               view_type=data.view_type, metric_type=data.metric_type,
-                                               metric_of=data.metric_of, metric_value=data.metric_value))
-
-    return results
-
-
 def __get_table_of_series(project_id, data: schemas.CardSchema):
     results = []
     for i, s in enumerate(data.series):

@@ -43,9 +29,6 @@ def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel
             "totalDropDueToIssues": 0
         }

-    # return funnels.get_top_insights_on_the_fly_widget(project_id=project_id,
-    #                                                   data=data.series[0].filter,
-    #                                                   metric_format=data.metric_format)
     return funnels.get_simple_funnel(project=project,
                                      data=data.series[0].filter,
                                      metric_format=data.metric_format)

@@ -93,7 +76,12 @@ def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: int, dat


 def __get_timeseries_chart(project: schemas.ProjectContext, data: schemas.CardTimeSeries, user_id: int = None):
-    series_charts = __try_live(project_id=project.project_id, data=data)
+    series_charts = []
+    for i, s in enumerate(data.series):
+        series_charts.append(sessions.search2_series(data=s.filter, project_id=project.project_id, density=data.density,
+                                                     view_type=data.view_type, metric_type=data.metric_type,
+                                                     metric_of=data.metric_of, metric_value=data.metric_value))
+
     results = [{}] * len(series_charts[0])
     for i in range(len(results)):
         for j, series_chart in enumerate(series_charts):

@@ -173,19 +161,12 @@ def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id
         schemas.MetricType.TABLE: __get_table_chart,
         schemas.MetricType.HEAT_MAP: __get_heat_map_chart,
         schemas.MetricType.FUNNEL: __get_funnel_chart,
-        schemas.MetricType.INSIGHTS: not_supported,
         schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart
     }
     return supported.get(data.metric_type, not_supported)(project=project, data=data, user_id=user_id)


 def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
-    # No need for this because UI is sending the full payload
-    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    # if card is None:
-    #     return None
-    # metric: schemas.CardSchema = schemas.CardSchema(**card)
-    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
         return None
     results = []

@@ -220,7 +201,6 @@ def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.Card
         schemas.MetricType.TIMESERIES: not_supported,
         schemas.MetricType.TABLE: not_supported,
         schemas.MetricType.HEAT_MAP: not_supported,
         schemas.MetricType.INSIGHTS: not_supported,
         schemas.MetricType.PATH_ANALYSIS: not_supported,
     }
     return supported.get(data.metric_type, not_supported)()

@@ -555,17 +535,7 @@ def change_state(project_id, metric_id, user_id, status):


 def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
-                                 data: schemas.CardSessionsSchema
-                                 # , range_value=None, start_date=None, end_date=None
-                                 ):
-    # No need for this because UI is sending the full payload
-    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    # if card is None:
-    #     return None
-    # metric: schemas.CardSchema = schemas.CardSchema(**card)
-    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
-    # if metric is None:
-    #     return None
+                                 data: schemas.CardSessionsSchema):
     if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
         return None
     for s in data.series:

@@ -1,9 +1,9 @@
 from typing import Optional

 import schemas
-from chalicelib.core import autocomplete
+from chalicelib.core.autocomplete import autocomplete
 from chalicelib.core import issues
-from chalicelib.core import sessions_metas
+from chalicelib.core.sessions import sessions_metas
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.event_filter_definition import SupportedFilter, Event

@@ -1,7 +1,8 @@
 import logging

 import schemas
-from chalicelib.core import sessions_mobs, sessions
+from chalicelib.core import sessions
+from chalicelib.core.sessions import sessions_mobs
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils import sql_helper as sh

api/chalicelib/core/issue_tracking/__init__.py (new empty file)

@@ -1,6 +1,6 @@
 import schemas
-from chalicelib.core import integration_base
-from chalicelib.core.integration_github_issue import GithubIntegrationIssue
+from chalicelib.core.issue_tracking import integration_base
+from chalicelib.core.issue_tracking.integration_github_issue import GithubIntegrationIssue
 from chalicelib.utils import pg_client, helper

 PROVIDER = schemas.IntegrationType.GITHUB

@@ -1,4 +1,4 @@
-from chalicelib.core.integration_base_issue import BaseIntegrationIssue
+from chalicelib.core.issue_tracking.integration_base_issue import BaseIntegrationIssue
 from chalicelib.utils import github_client_v3
 from chalicelib.utils.github_client_v3 import github_formatters as formatter

@@ -1,4 +1,5 @@
 import schemas
+from chalicelib.core.issue_tracking.modules import TENANT_CONDITION
 from chalicelib.utils import pg_client

@@ -51,10 +52,10 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
                            AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
                    EXISTS((SELECT 1
                            FROM public.webhooks
-                           WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
+                           WHERE type='slack' AND deleted_at ISNULL AND {TENANT_CONDITION})) AS {schemas.IntegrationType.SLACK.value},
                    EXISTS((SELECT 1
                            FROM public.webhooks
-                           WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value},
+                           WHERE type='msteams' AND deleted_at ISNULL AND {TENANT_CONDITION})) AS {schemas.IntegrationType.MS_TEAMS.value},
                    EXISTS((SELECT 1
                            FROM public.integrations
                            WHERE project_id=%(project_id)s AND provider='dynatrace')) AS {schemas.IntegrationType.DYNATRACE.value};""",

@@ -1,4 +1,4 @@
-from chalicelib.core import integration_github, integration_jira_cloud
+from chalicelib.core.issue_tracking import integration_github, integration_jira_cloud
 from chalicelib.utils import pg_client

 SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER]

@@ -1,6 +1,6 @@
 import schemas
-from chalicelib.core import integration_base
-from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
+from chalicelib.core.issue_tracking import integration_base
+from chalicelib.core.issue_tracking.integration_jira_cloud_issue import JIRACloudIntegrationIssue
 from chalicelib.utils import pg_client, helper

 PROVIDER = schemas.IntegrationType.JIRA

@@ -1,5 +1,5 @@
 from chalicelib.utils import jira_client
-from chalicelib.core.integration_base_issue import BaseIntegrationIssue
+from chalicelib.core.issue_tracking.integration_base_issue import BaseIntegrationIssue


 class JIRACloudIntegrationIssue(BaseIntegrationIssue):

api/chalicelib/core/issue_tracking/modules/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
+TENANT_CONDITION = "TRUE"

@@ -1,6 +1,6 @@
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.core import sessions_mobs, sessions_devtool
+from chalicelib.core.sessions import sessions_mobs, sessions_devtool


 class Actions:

api/chalicelib/core/log_tools/__init__.py (new empty file)

@@ -1,5 +1,6 @@
 from chalicelib.utils import pg_client, helper
 import json
+from chalicelib.core.log_tools.modules import TENANT_CONDITION

 EXCEPT = ["jira_server", "jira_cloud"]

@@ -94,11 +95,11 @@ def get_all_by_tenant(tenant_id, integration):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(
-                """SELECT integrations.*
+                f"""SELECT integrations.*
                    FROM public.integrations INNER JOIN public.projects USING(project_id)
-                   WHERE provider = %(provider)s
+                   WHERE provider = %(provider)s AND {TENANT_CONDITION}
                      AND projects.deleted_at ISNULL;""",
-                {"provider": integration})
+                {"tenant_id": tenant_id, "provider": integration})
         )
         r = cur.fetchall()
     return helper.list_to_camel_case(r, flatten=True)
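
Note: with the single-tenant default TENANT_CONDITION = "TRUE", the f-string above renders to a query that effectively filters on the provider only; a small sketch of how it composes (parameter binding is still left to mogrify):

    TENANT_CONDITION = "TRUE"  # value from chalicelib/core/log_tools/modules/__init__.py
    query = f"""SELECT integrations.*
                FROM public.integrations INNER JOIN public.projects USING(project_id)
                WHERE provider = %(provider)s AND {TENANT_CONDITION}
                  AND projects.deleted_at ISNULL;"""
    # mogrify/execute still receive {"tenant_id": ..., "provider": ...};
    # the extra tenant_id key is simply unused while the condition is TRUE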

api/chalicelib/core/log_tools/modules/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
+TENANT_CONDITION = "TRUE"

api/chalicelib/core/product_anaytics2.py (new file, 14 lines)

@@ -0,0 +1,14 @@
+from chalicelib.utils.ch_client import ClickHouseClient
+
+
+def search_events(project_id: int, data: dict):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT *
+               FROM taha.events
+               WHERE project_id=%(project_id)s
+               ORDER BY created_at;""",
+            params={"project_id": project_id})
+        x = ch_client.execute(r)
+
+    return x

api/chalicelib/core/sessions/__init__.py (new empty file)

@@ -2,7 +2,8 @@ import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite
+from chalicelib.core import events, metadata, projects
+from chalicelib.core.sessions import sessions_favorite, performance_event
 from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh

@@ -2,7 +2,7 @@ from decouple import config
 from chalicelib.utils import helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils import pg_client
-from chalicelib.core import integrations_manager, integration_base_issue
+from chalicelib.core.issue_tracking import integrations_manager, integration_base_issue
 import json

@@ -3,11 +3,13 @@ import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata, projects, performance_event, metrics, sessions_favorite, sessions_legacy
+from chalicelib.core import events, metadata, projects, metrics, sessions
+from chalicelib.core.sessions import sessions_favorite, performance_event
 from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
 from chalicelib.utils import sql_helper as sh

 logger = logging.getLogger(__name__)

 SESSION_PROJECTION_COLS_CH = """\
 s.project_id,
 s.session_id AS session_id,
|
@ -1690,24 +1692,4 @@ def check_recording_status(project_id: int) -> dict:
|
|||
# TODO: rewrite this function to use ClickHouse
|
||||
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
|
||||
ascending: bool = False) -> dict:
|
||||
if session_ids is None or len(session_ids) == 0:
|
||||
return {"total": 0, "sessions": []}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
params = {"project_id": project_id, "session_ids": tuple(session_ids)}
|
||||
order_direction = 'ASC' if ascending else 'DESC'
|
||||
main_query = cur.mogrify(f"""SELECT {sessions_legacy.SESSION_PROJECTION_BASE_COLS}
|
||||
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
|
||||
FROM public.sessions AS s
|
||||
WHERE project_id=%(project_id)s
|
||||
AND session_id IN %(session_ids)s
|
||||
ORDER BY {sort_by} {order_direction};""", params)
|
||||
|
||||
cur.execute(main_query)
|
||||
rows = cur.fetchall()
|
||||
if len(meta_keys) > 0:
|
||||
for s in rows:
|
||||
s["metadata"] = {}
|
||||
for m in meta_keys:
|
||||
s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
|
||||
return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
|
||||
return sessions.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)
|
||||
|
|

@@ -1,5 +1,5 @@
 import schemas
-from chalicelib.core import autocomplete
+from chalicelib.core.autocomplete import autocomplete
 from chalicelib.utils.event_filter_definition import SupportedFilter

 SUPPORTED_TYPES = {

@@ -42,7 +42,7 @@ SUPPORTED_TYPES = {
     schemas.FilterType.UTM_SOURCE: SupportedFilter(
         get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE),
         query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)),
-    # IOS
+    # Mobile
     schemas.FilterType.USER_OS_MOBILE: SupportedFilter(
         get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE),
         query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)),

@@ -4,8 +4,8 @@ from urllib.parse import urljoin
 from decouple import config

 import schemas
-from chalicelib.core.collaboration_msteams import MSTeams
-from chalicelib.core.collaboration_slack import Slack
+from chalicelib.core.collaborations.collaboration_msteams import MSTeams
+from chalicelib.core.collaborations.collaboration_slack import Slack
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils import sql_helper as sh
 from chalicelib.utils.TimeUTC import TimeUTC

@@ -1,6 +1,7 @@
 import schemas
 from chalicelib.core import events, metadata, events_mobile, \
-    sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing
+    issues, assist, canvas, user_testing
+from chalicelib.core.sessions import sessions_mobs, sessions_devtool
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper

@@ -765,30 +765,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
     return n_critical_issues, issues_dict, total_drop_due_to_issues


-def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id,
-                     metric_format: schemas.MetricExtendedFormatType):
-    output = []
-    stages = filter_d.events
-
-    if len(stages) == 0:
-        logger.debug("no stages found")
-        return output, 0
-
-    # The result of the multi-stage query
-    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
-    # Obtain the first part of the output
-    stages_list = get_stages(stages, rows, metric_format=metric_format)
-    if len(rows) == 0:
-        return stages_list, 0
-
-    # Obtain the second part of the output
-    total_drop_due_to_issues = get_issues(stages, rows,
-                                          first_stage=1,
-                                          last_stage=len(filter_d.events),
-                                          drop_only=True)
-    return stages_list, total_drop_due_to_issues
-
-
 def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
     output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
     stages = filter_d.events

@@ -457,12 +457,6 @@ def set_password_invitation(user_id, new_password):
     user = update(tenant_id=-1, user_id=user_id, changes=changes)
     r = authenticate(user['email'], new_password)

-    tenant_id = r.pop("tenantId")
-    r["limits"] = {
-        "teamMember": -1,
-        "projects": -1,
-        "metadata": metadata.get_remaining_metadata_with_count(tenant_id)}

     return {
         "jwt": r.pop("jwt"),
         "refreshToken": r.pop("refreshToken"),

@@ -470,10 +464,7 @@ def set_password_invitation(user_id, new_password):
         "spotJwt": r.pop("spotJwt"),
         "spotRefreshToken": r.pop("spotRefreshToken"),
         "spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"),
-        'data': {
-            "scopeState": scope.get_scope(-1),
-            "user": r
-        }
+        **r
     }

@@ -129,13 +129,13 @@ def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
     if data.webhook_id is not None:
         return update(tenant_id=tenant_id, webhook_id=data.webhook_id,
-                      changes={"endpoint": data.endpoint.unicode_string(),
+                      changes={"endpoint": data.endpoint,
                                "authHeader": data.auth_header,
                                "name": data.name},
                      replace_none=replace_none)
     else:
         return add(tenant_id=tenant_id,
-                   endpoint=data.endpoint.unicode_string(),
+                   endpoint=data.endpoint,
                    auth_header=data.auth_header,
                    name=data.name,
                    replace_none=replace_none)

@@ -11,3 +11,9 @@ if smtp.has_smtp():
     logger.info("valid SMTP configuration found")
 else:
     logger.info("no SMTP configuration found or SMTP validation failed")
+
+if config("EXP_CH_DRIVER", cast=bool, default=True):
+    logging.info(">>> Using new CH driver")
+    from . import ch_client_exp as ch_client
+else:
+    from . import ch_client

@@ -3,15 +3,15 @@ import logging
 import clickhouse_driver
 from decouple import config

-logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
+logger = logging.getLogger(__name__)

 settings = {}
 if config('ch_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
+    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
     settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

 if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
+    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
     settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

@@ -35,20 +35,20 @@ class ClickHouseClient:
     def __enter__(self):
         return self

-    def execute(self, query, params=None, **args):
+    def execute(self, query, parameters=None, **args):
         try:
-            results = self.__client.execute(query=query, params=params, with_column_types=True, **args)
+            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
             keys = tuple(x for x, y in results[1])
             return [dict(zip(keys, i)) for i in results[0]]
         except Exception as err:
-            logging.error("--------- CH EXCEPTION -----------")
-            logging.error(err)
-            logging.error("--------- CH QUERY EXCEPTION -----------")
-            logging.error(self.format(query=query, params=params)
-                          .replace('\n', '\\n')
-                          .replace(' ', ' ')
-                          .replace(' ', ' '))
-            logging.error("--------------------")
+            logger.error("--------- CH EXCEPTION -----------")
+            logger.error(err)
+            logger.error("--------- CH QUERY EXCEPTION -----------")
+            logger.error(self.format(query=query, parameters=parameters)
+                         .replace('\n', '\\n')
+                         .replace(' ', ' ')
+                         .replace(' ', ' '))
+            logger.error("--------------------")
             raise err

     def insert(self, query, params=None, **args):

@@ -57,10 +57,18 @@ class ClickHouseClient:
     def client(self):
         return self.__client

-    def format(self, query, params):
-        if params is None:
+    def format(self, query, parameters):
+        if parameters is None:
             return query
-        return self.__client.substitute_params(query, params, self.__client.connection.context)
+        return self.__client.substitute_params(query, parameters, self.__client.connection.context)

     def __exit__(self, *args):
         pass
+
+
+async def init():
+    logger.info(f">CH_POOL:not defined")
+
+
+async def terminate():
+    pass
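
Note: the keyword rename above (params to parameters) lines the legacy clickhouse-driver wrapper up with the clickhouse-connect based client added below, so call sites can pass the same keyword to either. A hedged sketch of a call site (the query is illustrative; %(name)s placeholders are what clickhouse-driver substitutes):

    from chalicelib.utils.ch_client import ClickHouseClient

    with ClickHouseClient() as ch:
        # returns a list of dicts keyed by column name
        rows = ch.execute("SELECT %(pid)s AS project_id", parameters={"pid": 1})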

api/chalicelib/utils/ch_client_exp.py (new file, 176 lines)

@@ -0,0 +1,176 @@
+import logging
+import threading
+import time
+from functools import wraps
+from queue import Queue, Empty
+
+import clickhouse_connect
+from clickhouse_connect.driver.query import QueryContext
+from clickhouse_connect.driver.exceptions import DatabaseError
+from decouple import config
+
+logger = logging.getLogger(__name__)
+
+_CH_CONFIG = {"host": config("ch_host"),
+              "user": config("ch_user", default="default"),
+              "password": config("ch_password", default=""),
+              "port": config("ch_port_http", cast=int),
+              "client_name": config("APP_NAME", default="PY")}
+CH_CONFIG = dict(_CH_CONFIG)
+
+settings = {}
+if config('ch_timeout', cast=int, default=-1) > 0:
+    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
+    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
+
+if config('ch_receive_timeout', cast=int, default=-1) > 0:
+    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
+    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
+
+extra_args = {}
+if config("CH_COMPRESSION", cast=bool, default=True):
+    extra_args["compression"] = "lz4"
+
+
+def transform_result(original_function):
+    @wraps(original_function)
+    def wrapper(*args, **kwargs):
+        logger.info("Executing query on CH")
+        result = original_function(*args, **kwargs)
+        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
+            column_names = result.column_names
+            result = result.result_rows
+            result = [dict(zip(column_names, row)) for row in result]
+
+        return result
+
+    return wrapper
+
+
+class ClickHouseConnectionPool:
+    def __init__(self, min_size, max_size):
+        self.min_size = min_size
+        self.max_size = max_size
+        self.pool = Queue()
+        self.lock = threading.Lock()
+        self.total_connections = 0
+
+        # Initialize the pool with min_size connections
+        for _ in range(self.min_size):
+            client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                   database=config("ch_database", default="default"),
+                                                   settings=settings,
+                                                   **extra_args)
+            self.pool.put(client)
+            self.total_connections += 1
+
+    def get_connection(self):
+        try:
+            # Try to get a connection without blocking
+            client = self.pool.get_nowait()
+            return client
+        except Empty:
+            with self.lock:
+                if self.total_connections < self.max_size:
+                    client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                           database=config("ch_database", default="default"),
+                                                           settings=settings,
+                                                           **extra_args)
+                    self.total_connections += 1
+                    return client
+            # If max_size reached, wait until a connection is available
+            client = self.pool.get()
+            return client
+
+    def release_connection(self, client):
+        self.pool.put(client)
+
+    def close_all(self):
+        with self.lock:
+            while not self.pool.empty():
+                client = self.pool.get()
+                client.close()
+            self.total_connections = 0
+
+
+CH_pool: ClickHouseConnectionPool = None
+
+RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
+RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
+RETRY = 0
+
+
+def make_pool():
+    if not config('CH_POOL', cast=bool, default=True):
+        return
+    global CH_pool
+    global RETRY
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", error)
+    try:
+        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
+                                           max_size=config("CH_MAXCONN", cast=int, default=8))
+        if CH_pool is not None:
+            logger.info("Connection pool created successfully for CH")
+    except ConnectionError as error:
+        logger.error("Error while connecting to CH", error)
+        if RETRY < RETRY_MAX:
+            RETRY += 1
+            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+            time.sleep(RETRY_INTERVAL)
+            make_pool()
+        else:
+            raise error
+
+
+class ClickHouseClient:
+    __client = None
+
+    def __init__(self, database=None):
+        if self.__client is None:
+            if database is None and config('CH_POOL', cast=bool, default=True):
+                self.__client = CH_pool.get_connection()
+            else:
+                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                              database=database if database else config("ch_database",
+                                                                                                         default="default"),
+                                                              settings=settings,
+                                                              **extra_args)
+        self.__client.execute = transform_result(self.__client.query)
+        self.__client.format = self.format
+
+    def __enter__(self):
+        return self.__client
+
+    def format(self, query, *, parameters=None):
+        if parameters is None:
+            return query
+        return query % {
+            key: f"'{value}'" if isinstance(value, str) else value
+            for key, value in parameters.items()
+        }
+
+    def __exit__(self, *args):
+        if config('CH_POOL', cast=bool, default=True):
+            CH_pool.release_connection(self.__client)
+        else:
+            self.__client.close()
+
+
+async def init():
+    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
+    if config('CH_POOL', cast=bool, default=True):
+        make_pool()
+
+
+async def terminate():
+    global CH_pool
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+            logger.info("Closed all connexions to CH")
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", error)
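
Note: a short usage sketch for the pooled client above, assuming EXP_CH_DRIVER is left at its default (true) so chalicelib.utils.ch_client resolves to this module, and that ch_client.init() was awaited at startup (app.py now does this in lifespan); the function name and query are illustrative only:

    from chalicelib.utils import ch_client

    def count_sessions(project_id: int):
        # borrows a connection from CH_pool on enter and returns it on exit
        with ch_client.ClickHouseClient() as ch:
            sql = ch.format("SELECT count(*) AS n FROM experimental.sessions "
                            "WHERE project_id = %(project_id)s",
                            parameters={"project_id": project_id})
            return ch.execute(sql)  # list of dicts, e.g. [{"n": 42}]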

api/chalicelib/utils/exp_ch_helper.py (new file, 57 lines)

@@ -0,0 +1,57 @@
+from typing import Union
+
+import schemas
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def get_main_events_table(timestamp=0, platform="web"):
+    if platform == "web":
+        return "experimental.events"
+    else:
+        return "experimental.ios_events"
+
+
+def get_main_sessions_table(timestamp=0):
+    return "experimental.sessions"
+
+
+def get_main_js_errors_sessions_table(timestamp=0):
+    return get_main_events_table(timestamp=timestamp)
+
+
+def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType], platform="web"):
+    defs = {
+        schemas.EventType.CLICK: "CLICK",
+        schemas.EventType.INPUT: "INPUT",
+        schemas.EventType.LOCATION: "LOCATION",
+        schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: "LOCATION",
+        schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: "LOCATION",
+        schemas.PerformanceEventType.LOCATION_TTFB: "LOCATION",
+        schemas.EventType.CUSTOM: "CUSTOM",
+        schemas.EventType.REQUEST: "REQUEST",
+        schemas.EventType.REQUEST_DETAILS: "REQUEST",
+        schemas.PerformanceEventType.FETCH_FAILED: "REQUEST",
+        schemas.GraphqlFilterType.GRAPHQL_NAME: "GRAPHQL",
+        schemas.EventType.STATE_ACTION: "STATEACTION",
+        schemas.EventType.ERROR: "ERROR",
+        schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE',
+        schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE',
+        schemas.FetchFilterType.FETCH_URL: 'REQUEST'
+    }
+    defs_mobile = {
+        schemas.EventType.CLICK_MOBILE: "TAP",
+        schemas.EventType.INPUT_MOBILE: "INPUT",
+        schemas.EventType.CUSTOM_MOBILE: "CUSTOM",
+        schemas.EventType.REQUEST_MOBILE: "REQUEST",
+        schemas.EventType.ERROR_MOBILE: "CRASH",
+        schemas.EventType.VIEW_MOBILE: "VIEW",
+        schemas.EventType.SWIPE_MOBILE: "SWIPE"
+    }
+    if platform != "web" and event_type in defs_mobile:
+        return defs_mobile.get(event_type)
+    if event_type not in defs:
+        raise Exception(f"unsupported EventType:{event_type}")
+    return defs.get(event_type)
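
Note: a small sketch of how the new helper maps schema event types onto ClickHouse event names (values taken from the tables above; any platform string other than "web" selects the mobile mapping, "ios" here is just an example):

    import schemas
    from chalicelib.utils import exp_ch_helper

    assert exp_ch_helper.get_event_type(schemas.EventType.CLICK) == "CLICK"
    assert exp_ch_helper.get_event_type(schemas.EventType.CLICK_MOBILE, platform="ios") == "TAP"
    assert exp_ch_helper.get_main_events_table(platform="ios") == "experimental.ios_events"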

@@ -166,7 +166,7 @@ class PostgresClient:


 async def init():
-    logger.info(f">PG_POOL:{config('PG_POOL', default=None)}")
+    logger.info(f">use PG_POOL:{config('PG_POOL', default=True)}")
     if config('PG_POOL', cast=bool, default=True):
         make_pool()

@@ -8,6 +8,12 @@ assistList=/sockets-list
 CANVAS_PATTERN=%(sessionId)s/%(recordingId)s.tar.zst
 captcha_key=
 captcha_server=
+CH_COMPRESSION=true
+ch_host=
+ch_port=9000
+ch_port_http=8123
+ch_receive_timeout=10
+ch_timeout=30
 change_password_link=/reset-password?invitation=%s&&pass=%s
 DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
 EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools

@@ -63,4 +69,7 @@ SITE_URL=
 sourcemaps_bucket=sourcemaps
 sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/sourcemaps/{}/sourcemaps
 STAGE=default-foss
-TZ=UTC
+TZ=UTC
+EXP_CH_DRIVER=true
+EXP_AUTOCOMPLETE=true
+EXP_ALERTS=true

@@ -1,18 +1,19 @@
 # Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
-boto3==1.35.60
-pyjwt==2.9.0
+boto3==1.35.76
+pyjwt==2.10.1
 psycopg2-binary==2.9.10
 psycopg[pool,binary]==3.2.3
 clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.9
 elasticsearch==8.16.0
 jira==3.8.0
 cachetools==5.5.0


-fastapi==0.115.5
-uvicorn[standard]==0.32.0
+fastapi==0.115.6
+uvicorn[standard]==0.32.1
 python-decouple==3.8
-pydantic[email]==2.9.2
-apscheduler==3.10.4
+pydantic[email]==2.10.3
+apscheduler==3.11.0

@@ -1,20 +1,21 @@
 # Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
-boto3==1.35.60
-pyjwt==2.9.0
+boto3==1.35.76
+pyjwt==2.10.1
 psycopg2-binary==2.9.10
 psycopg[pool,binary]==3.2.3
 clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.9
 elasticsearch==8.16.0
 jira==3.8.0
 cachetools==5.5.0


-fastapi==0.115.5
-uvicorn[standard]==0.32.0
+fastapi==0.115.6
+uvicorn[standard]==0.32.1
 python-decouple==3.8
-pydantic[email]==2.9.2
-apscheduler==3.10.4
+pydantic[email]==2.10.3
+apscheduler==3.11.0

-redis==5.2.0
+redis==5.2.1

@@ -4,13 +4,18 @@ from decouple import config
 from fastapi import Depends, Body, BackgroundTasks

 import schemas
-from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, alerts, issues, \
-    integrations_manager, metadata, log_tool_elasticsearch, log_tool_datadog, log_tool_stackdriver, reset_password, \
-    log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, log_tool_newrelic, announcements, \
-    log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, assist, mobile, tenants, boarding, \
-    notifications, webhook, users, custom_metrics, saved_search, integrations_global, tags, autocomplete
-from chalicelib.core.collaboration_msteams import MSTeams
-from chalicelib.core.collaboration_slack import Slack
+from chalicelib.core import sourcemaps, events, projects, alerts, issues, \
+    metadata, reset_password, \
+    log_tools, sessions, announcements, \
+    weekly_report, assist, mobile, tenants, boarding, \
+    notifications, webhook, users, custom_metrics, saved_search, tags, autocomplete
+from chalicelib.core.issue_tracking import integration_github, integrations_global, integrations_manager, \
+    integration_jira_cloud
+from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \
+    sentry, bugsnag, cloudwatch, sumologic, rollbar
+from chalicelib.core.sessions import sessions_assignments
+from chalicelib.core.collaborations.collaboration_msteams import MSTeams
+from chalicelib.core.collaborations.collaboration_slack import Slack
 from or_dependencies import OR_context, OR_role
 from routers.base import get_routers

@@ -91,217 +96,217 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source:

 @app.get('/integrations/sentry', tags=["integrations"])
 def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.get_all(tenant_id=context.tenant_id)}
+    return {"data": sentry.get_all(tenant_id=context.tenant_id)}


 @app.get('/{projectId}/integrations/sentry', tags=["integrations"])
 def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.get(project_id=projectId)}
+    return {"data": sentry.get(project_id=projectId)}


 @app.post('/{projectId}/integrations/sentry', tags=["integrations"])
 def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
+    return {"data": sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


 @app.delete('/{projectId}/integrations/sentry', tags=["integrations"])
 def delete_sentry(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)}
+    return {"data": sentry.delete(tenant_id=context.tenant_id, project_id=projectId)}


 @app.get('/{projectId}/integrations/sentry/events/{eventId}', tags=["integrations"])
 def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)}
+    return {"data": sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)}


 @app.get('/integrations/datadog', tags=["integrations"])
 def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_datadog.get_all(tenant_id=context.tenant_id)}
+    return {"data": datadog.get_all(tenant_id=context.tenant_id)}


 @app.get('/{projectId}/integrations/datadog', tags=["integrations"])
 def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_datadog.get(project_id=projectId)}
+    return {"data": datadog.get(project_id=projectId)}


 @app.post('/{projectId}/integrations/datadog', tags=["integrations"])
 def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
+    return {"data": datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


 @app.delete('/{projectId}/integrations/datadog', tags=["integrations"])
 def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)}
+    return {"data": datadog.delete(tenant_id=context.tenant_id, project_id=projectId)}


 @app.get('/integrations/stackdriver', tags=["integrations"])
 def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_stackdriver.get_all(tenant_id=context.tenant_id)}
+    return {"data": stackdriver.get_all(tenant_id=context.tenant_id)}


 @app.get('/{projectId}/integrations/stackdriver', tags=["integrations"])
 def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_stackdriver.get(project_id=projectId)}
+    return {"data": stackdriver.get(project_id=projectId)}


 @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"])
 def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
+    return {"data": stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


 @app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"])
 def delete_stackdriver(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)}
+    return {"data": stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.get('/integrations/newrelic', tags=["integrations"])
|
||||
def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_newrelic.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": newrelic.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/newrelic', tags=["integrations"])
|
||||
def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_newrelic.get(project_id=projectId)}
|
||||
return {"data": newrelic.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/newrelic', tags=["integrations"])
|
||||
def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
return {"data": newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/newrelic', tags=["integrations"])
|
||||
def delete_newrelic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.get('/integrations/rollbar', tags=["integrations"])
|
||||
def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_rollbar.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": rollbar.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/rollbar', tags=["integrations"])
|
||||
def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_rollbar.get(project_id=projectId)}
|
||||
return {"data": rollbar.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/rollbar', tags=["integrations"])
|
||||
def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
return {"data": rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/rollbar', tags=["integrations"])
|
||||
def delete_rollbar(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/integrations/bugsnag/list_projects', tags=["integrations"])
|
||||
def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorization_token)}
|
||||
return {"data": bugsnag.list_projects(auth_token=data.authorization_token)}
|
||||
|
||||
|
||||
@app.get('/integrations/bugsnag', tags=["integrations"])
|
||||
def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_bugsnag.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": bugsnag.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/bugsnag', tags=["integrations"])
|
||||
def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_bugsnag.get(project_id=projectId)}
|
||||
return {"data": bugsnag.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/bugsnag', tags=["integrations"])
|
||||
def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
return {"data": bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"])
|
||||
def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/integrations/cloudwatch/list_groups', tags=["integrations"])
|
||||
def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId,
|
||||
aws_secret_access_key=data.awsSecretAccessKey,
|
||||
region=data.region)}
|
||||
return {"data": cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId,
|
||||
aws_secret_access_key=data.awsSecretAccessKey,
|
||||
region=data.region)}
|
||||
|
||||
|
||||
@app.get('/integrations/cloudwatch', tags=["integrations"])
|
||||
def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_cloudwatch.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": cloudwatch.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/cloudwatch', tags=["integrations"])
|
||||
def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_cloudwatch.get(project_id=projectId)}
|
||||
return {"data": cloudwatch.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"])
|
||||
def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
return {"data": cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"])
|
||||
def delete_cloudwatch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.get('/integrations/elasticsearch', tags=["integrations"])
|
||||
def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_elasticsearch.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": elasticsearch.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/elasticsearch', tags=["integrations"])
|
||||
def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_elasticsearch.get(project_id=projectId)}
|
||||
return {"data": elasticsearch.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/integrations/elasticsearch/test', tags=["integrations"])
|
||||
def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, data=data)}
|
||||
return {"data": elasticsearch.ping(tenant_id=context.tenant_id, data=data)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"])
|
||||
def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {
|
||||
"data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
"data": elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"])
|
||||
def delete_elasticsearch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.get('/integrations/sumologic', tags=["integrations"])
|
||||
def get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_sumologic.get_all(tenant_id=context.tenant_id)}
|
||||
return {"data": sumologic.get_all(tenant_id=context.tenant_id)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/integrations/sumologic', tags=["integrations"])
|
||||
def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_sumologic.get(project_id=projectId)}
|
||||
return {"data": sumologic.get(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/integrations/sumologic', tags=["integrations"])
|
||||
def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
return {"data": sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}
|
||||
|
||||
|
||||
@app.delete('/{projectId}/integrations/sumologic', tags=["integrations"])
|
||||
def delete_sumologic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
return {"data": sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)}
|
||||
|
||||
|
||||
@app.get('/integrations/issues', tags=["integrations"])
|
||||
|
|
|
|||
|
|
@ -8,13 +8,13 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
|
|||
|
||||
import schemas
|
||||
from chalicelib.core import scope
|
||||
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
|
||||
sessions_favorite, assist, sessions_notes, sessions_replay, signup, feature_flags
|
||||
from chalicelib.core import sessions_viewed
|
||||
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, heatmaps, \
|
||||
assist, signup, feature_flags
|
||||
from chalicelib.core.sessions import sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
|
||||
sessions_assignments, unprocessed_sessions
|
||||
from chalicelib.core import tenants, users, projects, license
|
||||
from chalicelib.core import unprocessed_sessions
|
||||
from chalicelib.core import webhook
|
||||
from chalicelib.core.collaboration_slack import Slack
|
||||
from chalicelib.core.collaborations.collaboration_slack import Slack
|
||||
from chalicelib.utils import captcha, smtp
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
|
|
|||
17
api/routers/subs/product_anaytics.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
from typing import Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import product_anaytics2
|
||||
from fastapi import Body, Depends
|
||||
from or_dependencies import OR_context
|
||||
from routers.base import get_routers
|
||||
|
||||
|
||||
public_app, app, app_apikey = get_routers()
|
||||
|
||||
|
||||
@app.post('/{projectId}/events/search', tags=["dashboard"])
|
||||
def search_events(projectId: int,
|
||||
# data: schemas.CreateDashboardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return product_anaytics2.search_events(project_id=projectId, data={})
|
||||
|
|
@ -11,59 +11,6 @@ from .transformers_validators import transform_email, remove_whitespace, remove_
|
|||
force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
|
||||
|
||||
|
||||
def transform_old_filter_type(cls, values):
|
||||
if values.get("type") is None:
|
||||
return values
|
||||
values["type"] = {
|
||||
# filters
|
||||
"USEROS": FilterType.USER_OS.value,
|
||||
"USERBROWSER": FilterType.USER_BROWSER.value,
|
||||
"USERDEVICE": FilterType.USER_DEVICE.value,
|
||||
"USERCOUNTRY": FilterType.USER_COUNTRY.value,
|
||||
"USERID": FilterType.USER_ID.value,
|
||||
"USERANONYMOUSID": FilterType.USER_ANONYMOUS_ID.value,
|
||||
"REFERRER": FilterType.REFERRER.value,
|
||||
"REVID": FilterType.REV_ID.value,
|
||||
"USEROS_IOS": FilterType.USER_OS_MOBILE.value,
|
||||
"USERDEVICE_IOS": FilterType.USER_DEVICE_MOBILE.value,
|
||||
"USERCOUNTRY_IOS": FilterType.USER_COUNTRY_MOBILE.value,
|
||||
"USERID_IOS": FilterType.USER_ID_MOBILE.value,
|
||||
"USERANONYMOUSID_IOS": FilterType.USER_ANONYMOUS_ID_MOBILE.value,
|
||||
"REVID_IOS": FilterType.REV_ID_MOBILE.value,
|
||||
"DURATION": FilterType.DURATION.value,
|
||||
"PLATFORM": FilterType.PLATFORM.value,
|
||||
"METADATA": FilterType.METADATA.value,
|
||||
"ISSUE": FilterType.ISSUE.value,
|
||||
"EVENTS_COUNT": FilterType.EVENTS_COUNT.value,
|
||||
"UTM_SOURCE": FilterType.UTM_SOURCE.value,
|
||||
"UTM_MEDIUM": FilterType.UTM_MEDIUM.value,
|
||||
"UTM_CAMPAIGN": FilterType.UTM_CAMPAIGN.value,
|
||||
# events:
|
||||
"CLICK": EventType.CLICK.value,
|
||||
"INPUT": EventType.INPUT.value,
|
||||
"LOCATION": EventType.LOCATION.value,
|
||||
"CUSTOM": EventType.CUSTOM.value,
|
||||
"REQUEST": EventType.REQUEST.value,
|
||||
"FETCH": EventType.REQUEST_DETAILS.value,
|
||||
"GRAPHQL": EventType.GRAPHQL.value,
|
||||
"STATEACTION": EventType.STATE_ACTION.value,
|
||||
"ERROR": EventType.ERROR.value,
|
||||
"CLICK_IOS": EventType.CLICK_MOBILE.value,
|
||||
"INPUT_IOS": EventType.INPUT_MOBILE.value,
|
||||
"VIEW_IOS": EventType.VIEW_MOBILE.value,
|
||||
"CUSTOM_IOS": EventType.CUSTOM_MOBILE.value,
|
||||
"REQUEST_IOS": EventType.REQUEST_MOBILE.value,
|
||||
"ERROR_IOS": EventType.ERROR_MOBILE.value,
|
||||
"DOM_COMPLETE": PerformanceEventType.LOCATION_DOM_COMPLETE.value,
|
||||
"LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME.value,
|
||||
"TTFB": PerformanceEventType.LOCATION_TTFB.value,
|
||||
"AVG_CPU_LOAD": PerformanceEventType.LOCATION_AVG_CPU_LOAD.value,
|
||||
"AVG_MEMORY_USAGE": PerformanceEventType.LOCATION_AVG_MEMORY_USAGE.value,
|
||||
"FETCH_FAILED": PerformanceEventType.FETCH_FAILED.value,
|
||||
}.get(values["type"], values["type"])
|
||||
return values
|
||||
|
||||
|
||||
class _GRecaptcha(BaseModel):
|
||||
g_recaptcha_response: Optional[str] = Field(default=None, alias='g-recaptcha-response')
|
||||
|
||||
|
|
@ -211,7 +158,8 @@ class IssueTrackingJiraSchema(IssueTrackingIntegration):
|
|||
|
||||
class WebhookSchema(BaseModel):
|
||||
webhook_id: Optional[int] = Field(default=None)
|
||||
endpoint: AnyHttpUrl = Field(...)
|
||||
processed_endpoint: AnyHttpUrl = Field(..., alias="endpoint")
|
||||
endpoint: Optional[str] = Field(default=None, doc_hidden=True)
|
||||
auth_header: Optional[str] = Field(default=None)
|
||||
name: str = Field(default="", max_length=100, pattern=NAME_PATTERN)
|
||||
|
||||
|
|
@ -601,7 +549,6 @@ class SessionSearchEventSchema2(BaseModel):
|
|||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
|
||||
_transform = model_validator(mode='before')(transform_old_filter_type)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def event_validator(self):
|
||||
|
|
@ -638,7 +585,6 @@ class SessionSearchFilterSchema(BaseModel):
|
|||
source: Optional[Union[ErrorSource, str]] = Field(default=None)
|
||||
|
||||
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
|
||||
_transform = model_validator(mode='before')(transform_old_filter_type)
|
||||
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
|
||||
|
||||
@model_validator(mode="before")
|
||||
|
|
@ -754,6 +700,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
|||
for f in values.get("filters", []):
|
||||
vals = []
|
||||
for v in f.get("value", []):
|
||||
if f.get("type", "") == FilterType.DURATION.value and v is None:
|
||||
v = 0
|
||||
if v is not None and (f.get("type", "") != FilterType.DURATION.value
|
||||
or str(v).isnumeric()):
|
||||
vals.append(v)
|
||||
|
|
@ -895,6 +843,11 @@ class CardSeriesSchema(BaseModel):
|
|||
class MetricTimeseriesViewType(str, Enum):
|
||||
LINE_CHART = "lineChart"
|
||||
AREA_CHART = "areaChart"
|
||||
BAR_CHART = "barChart"
|
||||
PIE_CHART = "pieChart"
|
||||
PROGRESS_CHART = "progressChart"
|
||||
TABLE_CHART = "table"
|
||||
METRIC_CHART = "metric"
|
||||
|
||||
|
||||
class MetricTableViewType(str, Enum):
|
||||
|
|
@ -918,7 +871,6 @@ class MetricType(str, Enum):
|
|||
RETENTION = "retention"
|
||||
STICKINESS = "stickiness"
|
||||
HEAT_MAP = "heatMap"
|
||||
INSIGHTS = "insights"
|
||||
|
||||
|
||||
class MetricOfErrors(str, Enum):
|
||||
|
|
@ -1194,31 +1146,6 @@ class CardHeatMap(__CardSchema):
|
|||
return self
|
||||
|
||||
|
||||
class MetricOfInsights(str, Enum):
|
||||
ISSUE_CATEGORIES = "issueCategories"
|
||||
|
||||
|
||||
class CardInsights(__CardSchema):
|
||||
metric_type: Literal[MetricType.INSIGHTS]
|
||||
metric_of: MetricOfInsights = Field(default=MetricOfInsights.ISSUE_CATEGORIES)
|
||||
view_type: MetricOtherViewType = Field(...)
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def __enforce_default(cls, values):
|
||||
values["view_type"] = MetricOtherViewType.LIST_CHART
|
||||
return values
|
||||
|
||||
@model_validator(mode="after")
|
||||
def __transform(self):
|
||||
self.metric_of = MetricOfInsights(self.metric_of)
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def restrictions(self):
|
||||
raise ValueError(f"metricType:{MetricType.INSIGHTS} not supported yet.")
|
||||
|
||||
|
||||
class CardPathAnalysisSeriesSchema(CardSeriesSchema):
|
||||
name: Optional[str] = Field(default=None)
|
||||
filter: PathAnalysisSchema = Field(...)
|
||||
|
|
@ -1295,7 +1222,7 @@ __cards_union_base = Union[
|
|||
CardErrors,
|
||||
CardWebVital, CardHeatMap,
|
||||
CardPathAnalysis]
|
||||
CardSchema = ORUnion(Union[__cards_union_base, CardInsights], discriminator='metric_type')
|
||||
CardSchema = ORUnion(__cards_union_base, discriminator='metric_type')
|
||||
|
||||
|
||||
class UpdateCardStatusSchema(BaseModel):
|
||||
|
|
@ -1379,8 +1306,6 @@ class LiveSessionSearchFilterSchema(BaseModel):
|
|||
operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] \
|
||||
= Field(default=SearchEventOperator.CONTAINS)
|
||||
|
||||
_transform = model_validator(mode='before')(transform_old_filter_type)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def __validator(self):
|
||||
if self.type is not None and self.type == LiveFilterType.METADATA:
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import (
|
|||
config "openreplay/backend/internal/config/db"
|
||||
"openreplay/backend/internal/db"
|
||||
"openreplay/backend/internal/db/datasaver"
|
||||
"openreplay/backend/pkg/db/clickhouse"
|
||||
"openreplay/backend/pkg/db/postgres"
|
||||
"openreplay/backend/pkg/db/postgres/pool"
|
||||
"openreplay/backend/pkg/db/redis"
|
||||
|
|
@ -33,9 +34,15 @@ func main() {
|
|||
}
|
||||
defer pgConn.Close()
|
||||
|
||||
// Init events module
|
||||
pg := postgres.NewConn(log, pgConn)
|
||||
defer pg.Close()
|
||||
chConn := clickhouse.NewConnector(cfg.Clickhouse)
|
||||
if err := chConn.Prepare(); err != nil {
|
||||
log.Fatal(ctx, "can't prepare clickhouse: %s", err)
|
||||
}
|
||||
defer chConn.Stop()
|
||||
|
||||
// Init db proxy module (postgres + clickhouse + batches)
|
||||
dbProxy := postgres.NewConn(log, pgConn, chConn)
|
||||
defer dbProxy.Close()
|
||||
|
||||
// Init redis connection
|
||||
redisClient, err := redis.New(&cfg.Redis)
|
||||
|
|
@ -49,7 +56,7 @@ func main() {
|
|||
tagsManager := tags.New(log, pgConn)
|
||||
|
||||
// Init data saver
|
||||
saver := datasaver.New(log, cfg, pg, sessManager, tagsManager)
|
||||
saver := datasaver.New(log, cfg, dbProxy, chConn, sessManager, tagsManager)
|
||||
|
||||
// Message filter
|
||||
msgFilter := []int{
|
||||
|
|
|
|||
|
|
@ -57,10 +57,18 @@ type Redshift struct {
|
|||
// Clickhouse config
|
||||
|
||||
type Clickhouse struct {
|
||||
URL string `env:"CLICKHOUSE_STRING"`
|
||||
Database string `env:"CLICKHOUSE_DATABASE,default=default"`
|
||||
UserName string `env:"CLICKHOUSE_USERNAME,default=default"`
|
||||
Password string `env:"CLICKHOUSE_PASSWORD,default="`
|
||||
URL string `env:"CLICKHOUSE_STRING"`
|
||||
Database string `env:"CLICKHOUSE_DATABASE,default=default"`
|
||||
UserName string `env:"CLICKHOUSE_USERNAME,default=default"`
|
||||
Password string `env:"CLICKHOUSE_PASSWORD,default="`
|
||||
LegacyUserName string `env:"CH_USERNAME,default=default"`
|
||||
LegacyPassword string `env:"CH_PASSWORD,default="`
|
||||
}
|
||||
|
||||
func (cfg *Clickhouse) GetTrimmedURL() string {
|
||||
chUrl := strings.TrimPrefix(cfg.URL, "tcp://")
|
||||
chUrl = strings.TrimSuffix(chUrl, "/default")
|
||||
return chUrl
|
||||
}
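Editor's note: the standalone sketch below (not part of the diff) illustrates how GetTrimmedURL is meant to normalize the CLICKHOUSE_STRING value before it reaches the driver; the connection string used here is a made-up example.

package main

import (
	"fmt"
	"strings"
)

// Clickhouse mirrors only the URL field of the config struct above.
type Clickhouse struct {
	URL string
}

// GetTrimmedURL reproduces the trimming logic from the diff: it strips a
// leading "tcp://" scheme and a trailing "/default" database segment.
func (cfg *Clickhouse) GetTrimmedURL() string {
	chUrl := strings.TrimPrefix(cfg.URL, "tcp://")
	chUrl = strings.TrimSuffix(chUrl, "/default")
	return chUrl
}

func main() {
	// Hypothetical CLICKHOUSE_STRING value; host and port are assumptions.
	cfg := &Clickhouse{URL: "tcp://clickhouse:9000/default"}
	fmt.Println(cfg.GetTrimmedURL()) // prints: clickhouse:9000
}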
|
||||
|
||||
// ElasticSearch config
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import (
|
|||
type Config struct {
|
||||
common.Config
|
||||
common.Postgres
|
||||
common.Clickhouse
|
||||
redis.Redis
|
||||
ProjectExpiration time.Duration `env:"PROJECT_EXPIRATION,default=10m"`
|
||||
LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`
|
||||
|
|
|
|||
9
backend/internal/db/datasaver/fts.go
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
package datasaver
|
||||
|
||||
import (
|
||||
"openreplay/backend/pkg/messages"
|
||||
)
|
||||
|
||||
func (s *saverImpl) init() {}
|
||||
|
||||
func (s *saverImpl) sendToFTS(msg messages.Message, projID uint32) {}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
package datasaver
|
||||
|
||||
import (
|
||||
. "openreplay/backend/pkg/messages"
|
||||
)
|
||||
|
||||
func (s *saverImpl) init() {
|
||||
// noop
|
||||
}
|
||||
|
||||
func (s *saverImpl) handleExtraMessage(msg Message) error {
|
||||
switch m := msg.(type) {
|
||||
case *PerformanceTrackAggr:
|
||||
return s.pg.InsertWebStatsPerformance(m)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
72
backend/internal/db/datasaver/mobile.go
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
package datasaver
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"openreplay/backend/pkg/messages"
|
||||
"openreplay/backend/pkg/sessions"
|
||||
)
|
||||
|
||||
func (s *saverImpl) handleMobileMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error {
|
||||
switch m := msg.(type) {
|
||||
case *messages.MobileSessionEnd:
|
||||
return s.ch.InsertMobileSession(session)
|
||||
case *messages.MobileUserID:
|
||||
if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID)
|
||||
return nil
|
||||
case *messages.MobileUserAnonymousID:
|
||||
if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID)
|
||||
return nil
|
||||
case *messages.MobileMetadata:
|
||||
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
|
||||
case *messages.MobileEvent:
|
||||
if err := s.pg.InsertMobileEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileCustom(session, m)
|
||||
case *messages.MobileClickEvent:
|
||||
if err := s.pg.InsertMobileClickEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileClick(session, m)
|
||||
case *messages.MobileSwipeEvent:
|
||||
if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileSwipe(session, m)
|
||||
case *messages.MobileInputEvent:
|
||||
if err := s.pg.InsertMobileInputEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileInput(session, m)
|
||||
case *messages.MobileNetworkCall:
|
||||
if err := s.pg.InsertMobileNetworkCall(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileRequest(session, m, session.SaveRequestPayload)
|
||||
case *messages.MobileCrash:
|
||||
if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMobileCrash(session, m)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@ -30,11 +30,18 @@ type saverImpl struct {
|
|||
tags tags.Tags
|
||||
}
|
||||
|
||||
func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions.Sessions, tags tags.Tags) Saver {
|
||||
func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Connector, session sessions.Sessions, tags tags.Tags) Saver {
|
||||
switch {
|
||||
case pg == nil:
|
||||
log.Fatal(context.Background(), "pg pool is empty")
|
||||
case ch == nil:
|
||||
log.Fatal(context.Background(), "ch pool is empty")
|
||||
}
|
||||
s := &saverImpl{
|
||||
log: log,
|
||||
cfg: cfg,
|
||||
pg: pg,
|
||||
ch: ch,
|
||||
sessions: session,
|
||||
tags: tags,
|
||||
}
|
||||
|
|
@ -43,21 +50,34 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions.
|
|||
}
|
||||
|
||||
func (s *saverImpl) Handle(msg Message) {
|
||||
sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID())
|
||||
if msg.TypeID() == MsgCustomEvent {
|
||||
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
|
||||
}
|
||||
|
||||
var (
|
||||
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
|
||||
session *sessions.Session
|
||||
err error
|
||||
)
|
||||
if msg.TypeID() == MsgSessionEnd || msg.TypeID() == MsgMobileSessionEnd {
|
||||
session, err = s.sessions.GetUpdated(msg.SessionID(), true)
|
||||
} else {
|
||||
session, err = s.sessions.Get(msg.SessionID())
|
||||
}
|
||||
if err != nil || session == nil {
|
||||
s.log.Error(sessCtx, "error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
|
||||
return
|
||||
}
|
||||
|
||||
if IsMobileType(msg.TypeID()) {
|
||||
// Handle Mobile messages
|
||||
if err := s.handleMobileMessage(msg); err != nil {
|
||||
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
|
||||
if !postgres.IsPkeyViolation(err) {
|
||||
s.log.Error(sessCtx, "mobile message insertion error, msg: %+v, err: %s", msg, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// Handle Web messages
|
||||
if err := s.handleMessage(msg); err != nil {
|
||||
if err := s.handleWebMessage(sessCtx, session, msg); err != nil {
|
||||
if !postgres.IsPkeyViolation(err) {
|
||||
s.log.Error(sessCtx, "web message insertion error, msg: %+v, err: %s", msg, err)
|
||||
}
|
||||
|
|
@ -65,180 +85,22 @@ func (s *saverImpl) Handle(msg Message) {
|
|||
}
|
||||
}
|
||||
|
||||
if err := s.handleExtraMessage(msg); err != nil {
|
||||
s.log.Error(sessCtx, "extra message insertion error, msg: %+v, err: %s", msg, err)
|
||||
}
|
||||
s.sendToFTS(msg, session.ProjectID)
|
||||
return
|
||||
}
|
||||
|
||||
func (s *saverImpl) handleMobileMessage(msg Message) error {
|
||||
session, err := s.sessions.Get(msg.SessionID())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
switch m := msg.(type) {
|
||||
case *MobileUserID:
|
||||
if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID)
|
||||
return nil
|
||||
case *MobileUserAnonymousID:
|
||||
if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID)
|
||||
return nil
|
||||
case *MobileMetadata:
|
||||
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
|
||||
case *MobileEvent:
|
||||
return s.pg.InsertMobileEvent(session, m)
|
||||
case *MobileClickEvent:
|
||||
if err := s.pg.InsertMobileClickEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
|
||||
case *MobileSwipeEvent:
|
||||
if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
|
||||
case *MobileInputEvent:
|
||||
if err := s.pg.InsertMobileInputEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
|
||||
case *MobileNetworkCall:
|
||||
return s.pg.InsertMobileNetworkCall(session, m)
|
||||
case *MobileCrash:
|
||||
if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *saverImpl) handleMessage(msg Message) error {
|
||||
session, err := s.sessions.Get(msg.SessionID())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID())
|
||||
switch m := msg.(type) {
|
||||
case *SessionStart:
|
||||
return s.pg.HandleStartEvent(m)
|
||||
case *SessionEnd:
|
||||
return s.pg.HandleEndEvent(m.SessionID())
|
||||
case *Metadata:
|
||||
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
|
||||
case *IssueEvent:
|
||||
if m.Type == "dead_click" || m.Type == "click_rage" {
|
||||
if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
err = s.pg.InsertIssueEvent(session, m)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type))
|
||||
case *CustomIssue:
|
||||
ie := &IssueEvent{
|
||||
Type: "custom",
|
||||
Timestamp: m.Timestamp,
|
||||
MessageID: m.Index,
|
||||
ContextString: m.Name,
|
||||
Payload: m.Payload,
|
||||
}
|
||||
ie.SetMeta(m.Meta())
|
||||
if err = s.pg.InsertIssueEvent(session, ie); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type))
|
||||
case *UserID:
|
||||
if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID)
|
||||
return nil
|
||||
case *UserAnonymousID:
|
||||
if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID)
|
||||
return nil
|
||||
case *CustomEvent:
|
||||
return s.pg.InsertWebCustomEvent(session, m)
|
||||
case *MouseClick:
|
||||
if err = s.pg.InsertWebClickEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
|
||||
case *PageEvent:
|
||||
if err = s.pg.InsertWebPageEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
s.sessions.UpdateReferrer(session.SessionID, m.Referrer)
|
||||
s.sessions.UpdateUTM(session.SessionID, m.URL)
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 1)
|
||||
case *NetworkRequest:
|
||||
return s.pg.InsertWebNetworkRequest(session, m)
|
||||
case *GraphQL:
|
||||
return s.pg.InsertWebGraphQL(session, m)
|
||||
case *JSException:
|
||||
wrapper, err := types.WrapJSException(m)
|
||||
if err != nil {
|
||||
s.log.Warn(sessCtx, "error on wrapping JSException: %v", err)
|
||||
}
|
||||
if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000)
|
||||
case *IntegrationEvent:
|
||||
return s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
|
||||
case *InputChange:
|
||||
if err = s.pg.InsertInputChangeEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
|
||||
case *MouseThrashing:
|
||||
if err = s.pg.InsertMouseThrashing(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 0, 50)
|
||||
case *CanvasNode:
|
||||
if err = s.pg.InsertCanvasNode(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
case *TagTrigger:
|
||||
if err = s.pg.InsertTagTrigger(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *saverImpl) Commit() error {
|
||||
if s.pg != nil {
|
||||
s.pg.Commit()
|
||||
}
|
||||
if s.ch != nil {
|
||||
s.ch.Commit()
|
||||
}
|
||||
s.pg.Commit()
|
||||
s.ch.Commit()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *saverImpl) Close() error {
|
||||
if s.pg != nil {
|
||||
if err := s.pg.Close(); err != nil {
|
||||
s.log.Error(context.Background(), "pg.Close error: %s", err)
|
||||
}
|
||||
if err := s.pg.Close(); err != nil {
|
||||
s.log.Error(context.Background(), "pg.Close error: %s", err)
|
||||
}
|
||||
if s.ch != nil {
|
||||
if err := s.ch.Stop(); err != nil {
|
||||
s.log.Error(context.Background(), "ch.Close error: %s", err)
|
||||
}
|
||||
if err := s.ch.Stop(); err != nil {
|
||||
s.log.Error(context.Background(), "ch.Close error: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
146
backend/internal/db/datasaver/web.go
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
package datasaver
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"openreplay/backend/pkg/db/postgres"
|
||||
"openreplay/backend/pkg/db/types"
|
||||
"openreplay/backend/pkg/messages"
|
||||
"openreplay/backend/pkg/sessions"
|
||||
)
|
||||
|
||||
func (s *saverImpl) handleWebMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error {
|
||||
switch m := msg.(type) {
|
||||
case *messages.SessionStart:
|
||||
return s.pg.HandleStartEvent(m)
|
||||
case *messages.SessionEnd:
|
||||
if err := s.pg.HandleEndEvent(m.SessionID()); err != nil {
|
||||
return err
|
||||
}
|
||||
session, err := s.sessions.GetUpdated(m.SessionID(), true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebSession(session)
|
||||
case *messages.Metadata:
|
||||
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
|
||||
case *messages.IssueEvent:
|
||||
if m.Type == "dead_click" || m.Type == "click_rage" {
|
||||
if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if err := s.pg.InsertIssueEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type)); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertIssue(session, m)
|
||||
case *messages.CustomIssue:
|
||||
ie := &messages.IssueEvent{
|
||||
Type: "custom",
|
||||
Timestamp: m.Timestamp,
|
||||
MessageID: m.Index,
|
||||
ContextString: m.Name,
|
||||
Payload: m.Payload,
|
||||
}
|
||||
ie.SetMeta(m.Meta())
|
||||
if err := s.pg.InsertIssueEvent(session, ie); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type))
|
||||
case *messages.UserID:
|
||||
if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID)
|
||||
return nil
|
||||
case *messages.UserAnonymousID:
|
||||
if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID)
|
||||
return nil
|
||||
case *messages.CustomEvent:
|
||||
if err := s.pg.InsertWebCustomEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertCustom(session, m)
|
||||
case *messages.MouseClick:
|
||||
if err := s.pg.InsertWebClickEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebClickEvent(session, m)
|
||||
case *messages.PageEvent:
|
||||
if err := s.pg.InsertWebPageEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
s.sessions.UpdateReferrer(session.SessionID, m.Referrer)
|
||||
s.sessions.UpdateUTM(session.SessionID, m.URL)
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 1); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebPageEvent(session, m)
|
||||
case *messages.NetworkRequest:
|
||||
if err := s.pg.InsertWebNetworkRequest(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertRequest(session, m, session.SaveRequestPayload)
|
||||
case *messages.GraphQL:
|
||||
if err := s.pg.InsertWebGraphQL(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertGraphQL(session, m)
|
||||
case *messages.JSException:
|
||||
wrapper, err := types.WrapJSException(m)
|
||||
if err != nil {
|
||||
s.log.Warn(sessCtx, "error on wrapping JSException: %v", err)
|
||||
}
|
||||
if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebErrorEvent(session, wrapper)
|
||||
case *messages.IntegrationEvent:
|
||||
if err := s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
|
||||
case *messages.InputChange:
|
||||
if err := s.pg.InsertInputChangeEvent(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebInputDuration(session, m)
|
||||
case *messages.MouseThrashing:
|
||||
if err := s.pg.InsertMouseThrashing(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, 50); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertMouseThrashing(session, m)
|
||||
case *messages.CanvasNode:
|
||||
if err := s.pg.InsertCanvasNode(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
case *messages.TagTrigger:
|
||||
if err := s.pg.InsertTagTrigger(session, m); err != nil {
|
||||
return err
|
||||
}
|
||||
case *messages.PerformanceTrackAggr:
|
||||
if err := s.pg.InsertWebStatsPerformance(m); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.ch.InsertWebPerformanceTrackAggr(session, m)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@ -5,10 +5,11 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"openreplay/backend/pkg/metrics/database"
|
||||
"time"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
|
||||
|
||||
"openreplay/backend/pkg/metrics/database"
|
||||
)
|
||||
|
||||
type Bulk interface {
|
||||
|
|
@ -1,19 +1,31 @@
|
|||
package clickhouse
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
|
||||
|
||||
"openreplay/backend/internal/config/common"
|
||||
"openreplay/backend/pkg/db/types"
|
||||
"openreplay/backend/pkg/hashid"
|
||||
"openreplay/backend/pkg/messages"
|
||||
"openreplay/backend/pkg/sessions"
|
||||
"openreplay/backend/pkg/url"
|
||||
)
|
||||
|
||||
type Connector interface {
|
||||
Prepare() error
|
||||
Commit() error
|
||||
Stop() error
|
||||
// Web
|
||||
InsertWebSession(session *sessions.Session) error
|
||||
InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error
|
||||
InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error
|
||||
InsertWebInputEvent(session *sessions.Session, msg *messages.InputEvent) error
|
||||
InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error
|
||||
InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error
|
||||
InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error
|
||||
|
|
@ -21,4 +33,669 @@ type Connector interface {
|
|||
InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error
|
||||
InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error
|
||||
InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error
|
||||
InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error
|
||||
InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error
|
||||
// Mobile
|
||||
InsertMobileSession(session *sessions.Session) error
|
||||
InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error
|
||||
InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error
|
||||
InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error
|
||||
InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error
|
||||
InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error
|
||||
InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error
|
||||
}
|
||||
|
||||
type task struct {
|
||||
bulks []Bulk
|
||||
}
|
||||
|
||||
func NewTask() *task {
|
||||
return &task{bulks: make([]Bulk, 0, 21)}
|
||||
}
|
||||
|
||||
type connectorImpl struct {
|
||||
conn driver.Conn
|
||||
batches map[string]Bulk //driver.Batch
|
||||
workerTask chan *task
|
||||
done chan struct{}
|
||||
finished chan struct{}
|
||||
}
|
||||
|
||||
func NewConnector(cfg common.Clickhouse) Connector {
|
||||
conn, err := clickhouse.Open(&clickhouse.Options{
|
||||
Addr: []string{cfg.GetTrimmedURL()},
|
||||
Auth: clickhouse.Auth{
|
||||
Database: cfg.Database,
|
||||
Username: cfg.LegacyUserName,
|
||||
Password: cfg.LegacyPassword,
|
||||
},
|
||||
MaxOpenConns: 20,
|
||||
MaxIdleConns: 15,
|
||||
ConnMaxLifetime: 3 * time.Minute,
|
||||
Compression: &clickhouse.Compression{
|
||||
Method: clickhouse.CompressionLZ4,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
c := &connectorImpl{
|
||||
conn: conn,
|
||||
batches: make(map[string]Bulk, 20),
|
||||
workerTask: make(chan *task, 1),
|
||||
done: make(chan struct{}),
|
||||
finished: make(chan struct{}),
|
||||
}
|
||||
go c.worker()
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *connectorImpl) newBatch(name, query string) error {
|
||||
batch, err := NewBulk(c.conn, name, query)
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't create new batch: %s", err)
|
||||
}
|
||||
c.batches[name] = batch
|
||||
return nil
|
||||
}
|
||||
|
||||
var batches = map[string]string{
|
||||
// Web
|
||||
"sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?)",
|
||||
"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
|
||||
"pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, url_path, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?)",
|
||||
"clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type, selector, normalized_x, normalized_y, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
|
||||
"inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"performance": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"requests": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type, transfer_size, url_path) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000))",
|
||||
"custom": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"graphql": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"issuesEvents": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
|
||||
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
|
||||
//Mobile
|
||||
"ios_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
|
||||
"ios_custom": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"ios_clicks": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
"ios_swipes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, direction, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"ios_inputs": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
"ios_requests": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)",
|
||||
"ios_crashes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, reason, stacktrace, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
}
|
||||
|
||||
func (c *connectorImpl) Prepare() error {
|
||||
for table, query := range batches {
|
||||
if err := c.newBatch(table, query); err != nil {
|
||||
return fmt.Errorf("can't create %s batch: %s", table, err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
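Editor's note: Prepare simply walks the batches map and creates one prepared Bulk per entry, so registering an extra table is a matter of adding another query string to the map. The standalone sketch below (not part of the diff) models that loop with a stubbed newBatch; the "demo_events" entry and its column list are hypothetical.

package main

import "fmt"

// batches maps a logical batch name to its INSERT statement, as in the diff.
// The "demo_events" entry is a made-up addition for illustration only.
var batches = map[string]string{
	"custom":      "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"demo_events": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, event_type) VALUES (?, ?, ?, ?, ?, ?)",
}

// newBatch stands in for connectorImpl.newBatch; the real method allocates a
// Bulk bound to the ClickHouse connection, here we only report what would be prepared.
func newBatch(name, query string) error {
	fmt.Printf("prepared bulk %q (%d-byte query)\n", name, len(query))
	return nil
}

// prepare mirrors connectorImpl.Prepare: one Bulk per map entry.
func prepare() error {
	for table, query := range batches {
		if err := newBatch(table, query); err != nil {
			return fmt.Errorf("can't create %s batch: %s", table, err)
		}
	}
	return nil
}

func main() {
	if err := prepare(); err != nil {
		fmt.Println(err)
	}
}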
|
||||
|
||||
func (c *connectorImpl) Commit() error {
|
||||
newTask := NewTask()
|
||||
for _, b := range c.batches {
|
||||
newTask.bulks = append(newTask.bulks, b)
|
||||
}
|
||||
c.batches = make(map[string]Bulk, 20)
|
||||
if err := c.Prepare(); err != nil {
|
||||
log.Printf("can't prepare new CH batch set: %s", err)
|
||||
}
|
||||
c.workerTask <- newTask
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) Stop() error {
|
||||
c.done <- struct{}{}
|
||||
<-c.finished
|
||||
return c.conn.Close()
|
||||
}
|
||||
|
||||
func (c *connectorImpl) sendBulks(t *task) {
|
||||
for _, b := range t.bulks {
|
||||
if err := b.Send(); err != nil {
|
||||
log.Printf("can't send batch: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *connectorImpl) worker() {
|
||||
for {
|
||||
select {
|
||||
case t := <-c.workerTask:
|
||||
c.sendBulks(t)
|
||||
case <-c.done:
|
||||
for t := range c.workerTask {
|
||||
c.sendBulks(t)
|
||||
}
|
||||
c.finished <- struct{}{}
|
||||
return
|
||||
}
|
||||
}
|
||||
}
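Editor's note: Prepare, Commit and Stop form the connector's lifecycle. Commit hands the current batch set to the background worker and immediately prepares a fresh set, while Stop drains the worker before closing the connection. The sketch below (not part of the diff) shows one possible caller flow; the config values, flush interval and iteration count are assumptions.

package main

import (
	"log"
	"time"

	"openreplay/backend/internal/config/common"
	"openreplay/backend/pkg/db/clickhouse"
)

func main() {
	// Hypothetical configuration; real values come from the CLICKHOUSE_* / CH_* env vars.
	cfg := common.Clickhouse{
		URL:            "tcp://clickhouse:9000/default",
		Database:       "default",
		LegacyUserName: "default",
		LegacyPassword: "",
	}

	conn := clickhouse.NewConnector(cfg)
	if err := conn.Prepare(); err != nil {
		log.Fatalf("can't prepare clickhouse: %s", err)
	}

	// Flush the accumulated batches a few times, then shut down cleanly.
	for i := 0; i < 3; i++ {
		time.Sleep(30 * time.Second)
		if err := conn.Commit(); err != nil {
			log.Printf("commit failed: %s", err)
		}
	}
	if err := conn.Stop(); err != nil {
		log.Printf("stop failed: %s", err)
	}
}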
|
||||
|
||||
func (c *connectorImpl) checkError(name string, err error) {
|
||||
if err != clickhouse.ErrBatchAlreadySent {
|
||||
log.Printf("can't create %s batch after failed append operation: %s", name, err)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error {
|
||||
if msg.Label == "" {
|
||||
return nil
|
||||
}
|
||||
if err := c.batches["inputs"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Label,
|
||||
"INPUT",
|
||||
nullableUint16(uint16(msg.InputDuration)),
|
||||
nullableUint32(uint32(msg.HesitationTime)),
|
||||
); err != nil {
|
||||
c.checkError("inputs", err)
|
||||
return fmt.Errorf("can't append to inputs batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error {
|
||||
issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp)
|
||||
// Insert issue event to batches
|
||||
if err := c.batches["issuesEvents"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
issueID,
|
||||
"mouse_thrashing",
|
||||
"ISSUE",
|
||||
msg.Url,
|
||||
extractUrlPath(msg.Url),
|
||||
); err != nil {
|
||||
c.checkError("issuesEvents", err)
|
||||
return fmt.Errorf("can't append to issuesEvents batch: %s", err)
|
||||
}
|
||||
if err := c.batches["issues"].Append(
|
||||
uint16(session.ProjectID),
|
||||
issueID,
|
||||
"mouse_thrashing",
|
||||
msg.Url,
|
||||
); err != nil {
|
||||
c.checkError("issues", err)
|
||||
return fmt.Errorf("can't append to issues batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error {
|
||||
issueID := hashid.IssueID(session.ProjectID, msg)
|
||||
// Check issue type before insert to avoid panic from clickhouse lib
|
||||
switch msg.Type {
|
||||
case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing", "app_crash":
|
||||
default:
|
||||
return fmt.Errorf("unknown issueType: %s", msg.Type)
|
||||
}
|
||||
// Insert issue event to batches
|
||||
if err := c.batches["issuesEvents"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MessageID,
|
||||
datetime(msg.Timestamp),
|
||||
issueID,
|
||||
msg.Type,
|
||||
"ISSUE",
|
||||
msg.URL,
|
||||
extractUrlPath(msg.URL),
|
||||
); err != nil {
|
||||
c.checkError("issuesEvents", err)
|
||||
return fmt.Errorf("can't append to issuesEvents batch: %s", err)
|
||||
}
|
||||
if err := c.batches["issues"].Append(
|
||||
uint16(session.ProjectID),
|
||||
issueID,
|
||||
msg.Type,
|
||||
msg.ContextString,
|
||||
); err != nil {
|
||||
c.checkError("issues", err)
|
||||
return fmt.Errorf("can't append to issues batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
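
The switch at the top of InsertIssue exists because, per the comment, an unknown enum value would make the ClickHouse client panic on Append. A tiny standalone version of that guard (the allowed list below is abbreviated for illustration):

package main

import "fmt"

var allowedIssueTypes = map[string]struct{}{
	"click_rage": {}, "dead_click": {}, "excessive_scrolling": {},
	"bad_request": {}, "missing_resource": {}, "memory": {}, "cpu": {},
	"slow_resource": {}, "slow_page_load": {}, "crash": {}, "custom": {},
	"js_exception": {}, "mouse_thrashing": {}, "app_crash": {},
}

func checkIssueType(t string) error {
	if _, ok := allowedIssueTypes[t]; !ok {
		return fmt.Errorf("unknown issueType: %s", t)
	}
	return nil
}

func main() {
	fmt.Println(checkIssueType("dead_click")) // <nil>
	fmt.Println(checkIssueType("weird"))      // unknown issueType: weird
}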
|
||||
|
||||
func (c *connectorImpl) InsertWebSession(session *sessions.Session) error {
|
||||
if session.Duration == nil {
|
||||
return errors.New("trying to insert session with nil duration")
|
||||
}
|
||||
if err := c.batches["sessions"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
session.UserID,
|
||||
session.UserUUID,
|
||||
session.UserOS,
|
||||
nullableString(session.UserOSVersion),
|
||||
nullableString(session.UserDevice),
|
||||
session.UserDeviceType,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
datetime(session.Timestamp),
|
||||
uint32(*session.Duration),
|
||||
uint16(session.PagesCount),
|
||||
uint16(session.EventsCount),
|
||||
uint16(session.ErrorsCount),
|
||||
uint32(session.IssueScore),
|
||||
session.Referrer,
|
||||
session.IssueTypes,
|
||||
session.TrackerVersion,
|
||||
session.UserBrowser,
|
||||
nullableString(session.UserBrowserVersion),
|
||||
session.Metadata1,
|
||||
session.Metadata2,
|
||||
session.Metadata3,
|
||||
session.Metadata4,
|
||||
session.Metadata5,
|
||||
session.Metadata6,
|
||||
session.Metadata7,
|
||||
session.Metadata8,
|
||||
session.Metadata9,
|
||||
session.Metadata10,
|
||||
session.Timezone,
|
||||
session.UtmSource,
|
||||
session.UtmMedium,
|
||||
session.UtmCampaign,
|
||||
); err != nil {
|
||||
c.checkError("sessions", err)
|
||||
return fmt.Errorf("can't append to sessions batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func extractUrlPath(fullUrl string) string {
|
||||
_, path, query, err := url.GetURLParts(fullUrl)
|
||||
if err != nil {
|
||||
log.Printf("can't parse url: %s", err)
|
||||
return ""
|
||||
}
|
||||
pathQuery := path
|
||||
if query != "" {
|
||||
pathQuery += "?" + query
|
||||
}
|
||||
return strings.ToLower(pathQuery)
|
||||
}
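
A minimal standalone equivalent of extractUrlPath, using the standard net/url package as a stand-in for the internal url.GetURLParts helper (an assumption, since that helper is not shown here): keep path plus query, drop scheme and host, and lowercase the result.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func extractPathQuery(fullURL string) string {
	u, err := url.Parse(fullURL)
	if err != nil {
		return "" // the connector logs and returns "" on parse errors
	}
	pathQuery := u.Path
	if u.RawQuery != "" {
		pathQuery += "?" + u.RawQuery
	}
	return strings.ToLower(pathQuery)
}

func main() {
	// Prints "/products/42?ref=email"
	fmt.Println(extractPathQuery("https://Example.com/Products/42?ref=email"))
}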
|
||||
|
||||
func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error {
|
||||
if err := c.batches["pages"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MessageID,
|
||||
datetime(msg.Timestamp),
|
||||
msg.URL,
|
||||
nullableUint16(uint16(msg.RequestStart)),
|
||||
nullableUint16(uint16(msg.ResponseStart)),
|
||||
nullableUint16(uint16(msg.ResponseEnd)),
|
||||
nullableUint16(uint16(msg.DomContentLoadedEventStart)),
|
||||
nullableUint16(uint16(msg.DomContentLoadedEventEnd)),
|
||||
nullableUint16(uint16(msg.LoadEventStart)),
|
||||
nullableUint16(uint16(msg.LoadEventEnd)),
|
||||
nullableUint16(uint16(msg.FirstPaint)),
|
||||
nullableUint16(uint16(msg.FirstContentfulPaint)),
|
||||
nullableUint16(uint16(msg.SpeedIndex)),
|
||||
nullableUint16(uint16(msg.VisuallyComplete)),
|
||||
nullableUint16(uint16(msg.TimeToInteractive)),
|
||||
extractUrlPath(msg.URL),
|
||||
"LOCATION",
|
||||
); err != nil {
|
||||
c.checkError("pages", err)
|
||||
return fmt.Errorf("can't append to pages batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error {
|
||||
if msg.Label == "" {
|
||||
return nil
|
||||
}
|
||||
var nX *float32 = nil
|
||||
var nY *float32 = nil
|
||||
if msg.NormalizedX != 101 && msg.NormalizedY != 101 {
|
||||
// To support previous versions of tracker
|
||||
if msg.NormalizedX <= 100 && msg.NormalizedY <= 100 {
|
||||
msg.NormalizedX *= 100
|
||||
msg.NormalizedY *= 100
|
||||
}
|
||||
normalizedX := float32(msg.NormalizedX) / 100.0
|
||||
normalizedY := float32(msg.NormalizedY) / 100.0
|
||||
nXVal := normalizedX
|
||||
nX = &nXVal
|
||||
nYVal := normalizedY
|
||||
nY = &nYVal
|
||||
}
|
||||
if err := c.batches["clicks"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Label,
|
||||
nullableUint32(uint32(msg.HesitationTime)),
|
||||
"CLICK",
|
||||
msg.Selector,
|
||||
nX,
|
||||
nY,
|
||||
msg.Url,
|
||||
extractUrlPath(msg.Url),
|
||||
); err != nil {
|
||||
c.checkError("clicks", err)
|
||||
return fmt.Errorf("can't append to clicks batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
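
A worked sketch of the coordinate handling above, simplified to a single axis (the real code only converts when both X and Y differ from the sentinel): 101 means "no coordinate", legacy trackers report 0..100, newer ones report 0..10000, and both end up as a 0..100 float. The exact tracker ranges are inferred from this function alone.

package main

import "fmt"

func normalize(x uint64) *float32 {
	if x == 101 {
		return nil // sentinel: coordinate not available
	}
	if x <= 100 {
		x *= 100 // legacy tracker scale
	}
	v := float32(x) / 100.0
	return &v
}

func main() {
	if v := normalize(37); v != nil {
		fmt.Println(*v) // 37 (legacy scale)
	}
	if v := normalize(4275); v != nil {
		fmt.Println(*v) // 42.75 (current scale)
	}
	fmt.Println(normalize(101)) // <nil>
}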
|
||||
|
||||
func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error {
|
||||
keys, values := make([]string, 0, len(msg.Tags)), make([]*string, 0, len(msg.Tags))
|
||||
for k, v := range msg.Tags {
|
||||
keys = append(keys, k)
|
||||
values = append(values, v)
|
||||
}
|
||||
// Check error source before insert to avoid panic from clickhouse lib
|
||||
switch msg.Source {
|
||||
case "js_exception", "bugsnag", "cloudwatch", "datadog", "elasticsearch", "newrelic", "rollbar", "sentry", "stackdriver", "sumologic":
|
||||
default:
|
||||
return fmt.Errorf("unknown error source: %s", msg.Source)
|
||||
}
|
||||
msgID, _ := msg.ID(session.ProjectID)
|
||||
// Insert event to batch
|
||||
if err := c.batches["errors"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MessageID,
|
||||
datetime(msg.Timestamp),
|
||||
msg.Source,
|
||||
nullableString(msg.Name),
|
||||
msg.Message,
|
||||
msgID,
|
||||
"ERROR",
|
||||
keys,
|
||||
values,
|
||||
); err != nil {
|
||||
c.checkError("errors", err)
|
||||
return fmt.Errorf("can't append to errors batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error {
|
||||
var timestamp uint64 = (msg.TimestampStart + msg.TimestampEnd) / 2
|
||||
if err := c.batches["performance"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
uint64(0), // TODO: find messageID for performance events
|
||||
datetime(timestamp),
|
||||
nullableString(msg.Meta().Url),
|
||||
uint8(msg.MinFPS),
|
||||
uint8(msg.AvgFPS),
|
||||
uint8(msg.MaxFPS),
|
||||
uint8(msg.MinCPU),
|
||||
uint8(msg.AvgCPU),
|
||||
uint8(msg.MaxCPU),
|
||||
msg.MinTotalJSHeapSize,
|
||||
msg.AvgTotalJSHeapSize,
|
||||
msg.MaxTotalJSHeapSize,
|
||||
msg.MinUsedJSHeapSize,
|
||||
msg.AvgUsedJSHeapSize,
|
||||
msg.MaxUsedJSHeapSize,
|
||||
"PERFORMANCE",
|
||||
); err != nil {
|
||||
c.checkError("performance", err)
|
||||
return fmt.Errorf("can't append to performance batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error {
|
||||
if len(msgValue) == 0 {
|
||||
return nil
|
||||
}
|
||||
if err := c.batches["autocompletes"].Append(
|
||||
uint16(session.ProjectID),
|
||||
msgType,
|
||||
msgValue,
|
||||
); err != nil {
|
||||
c.checkError("autocompletes", err)
|
||||
return fmt.Errorf("can't append to autocompletes batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error {
|
||||
urlMethod := url.EnsureMethod(msg.Method)
|
||||
if urlMethod == "" {
|
||||
return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
|
||||
}
|
||||
var request, response *string
|
||||
if savePayload {
|
||||
request = &msg.Request
|
||||
response = &msg.Response
|
||||
}
|
||||
if err := c.batches["requests"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.Meta().Index,
|
||||
datetime(uint64(msg.Meta().Timestamp)),
|
||||
msg.URL,
|
||||
request,
|
||||
response,
|
||||
uint16(msg.Status),
|
||||
url.EnsureMethod(msg.Method),
|
||||
uint16(msg.Duration),
|
||||
msg.Status < 400,
|
||||
"REQUEST",
|
||||
uint32(msg.TransferredBodySize),
|
||||
extractUrlPath(msg.URL),
|
||||
); err != nil {
|
||||
c.checkError("requests", err)
|
||||
return fmt.Errorf("can't append to requests batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
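
A tiny sketch of the savePayload gating in InsertRequest: when payload capture is disabled, the request/response pointers stay nil, which presumably maps to NULL in nullable ClickHouse columns (an assumption; the table schema is not shown here).

package main

import "fmt"

func payloadColumns(req, resp string, savePayload bool) (request, response *string) {
	if savePayload {
		request, response = &req, &resp
	}
	return
}

func main() {
	r1, s1 := payloadColumns(`{"q":1}`, `{"ok":true}`, true)
	fmt.Println(*r1, *s1) // payloads stored
	r2, s2 := payloadColumns(`{"q":1}`, `{"ok":true}`, false)
	fmt.Println(r2, s2) // <nil> <nil>, payloads dropped
}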
|
||||
|
||||
func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
|
||||
if err := c.batches["custom"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.Meta().Index,
|
||||
datetime(uint64(msg.Meta().Timestamp)),
|
||||
msg.Name,
|
||||
msg.Payload,
|
||||
"CUSTOM",
|
||||
); err != nil {
|
||||
c.checkError("custom", err)
|
||||
return fmt.Errorf("can't append to custom batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error {
|
||||
if err := c.batches["graphql"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.Meta().Index,
|
||||
datetime(uint64(msg.Meta().Timestamp)),
|
||||
msg.OperationName,
|
||||
nullableString(msg.Variables),
|
||||
nullableString(msg.Response),
|
||||
"GRAPHQL",
|
||||
); err != nil {
|
||||
c.checkError("graphql", err)
|
||||
return fmt.Errorf("can't append to graphql batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Mobile events
|
||||
|
||||
func (c *connectorImpl) InsertMobileSession(session *sessions.Session) error {
|
||||
if session.Duration == nil {
|
||||
return errors.New("trying to insert mobile session with nil duration")
|
||||
}
|
||||
if err := c.batches["ios_sessions"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
session.UserID,
|
||||
session.UserUUID,
|
||||
session.UserOS,
|
||||
nullableString(session.UserOSVersion),
|
||||
nullableString(session.UserDevice),
|
||||
session.UserDeviceType,
|
||||
session.UserCountry,
|
||||
session.UserState,
|
||||
session.UserCity,
|
||||
datetime(session.Timestamp),
|
||||
uint32(*session.Duration),
|
||||
uint16(session.PagesCount),
|
||||
uint16(session.EventsCount),
|
||||
uint16(session.ErrorsCount),
|
||||
uint32(session.IssueScore),
|
||||
session.Referrer,
|
||||
session.IssueTypes,
|
||||
session.TrackerVersion,
|
||||
session.UserBrowser,
|
||||
nullableString(session.UserBrowserVersion),
|
||||
session.Metadata1,
|
||||
session.Metadata2,
|
||||
session.Metadata3,
|
||||
session.Metadata4,
|
||||
session.Metadata5,
|
||||
session.Metadata6,
|
||||
session.Metadata7,
|
||||
session.Metadata8,
|
||||
session.Metadata9,
|
||||
session.Metadata10,
|
||||
"ios",
|
||||
session.Timezone,
|
||||
); err != nil {
|
||||
c.checkError("ios_sessions", err)
|
||||
return fmt.Errorf("can't append to sessions batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error {
|
||||
if err := c.batches["ios_custom"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.Meta().Index,
|
||||
datetime(uint64(msg.Meta().Timestamp)),
|
||||
msg.Name,
|
||||
msg.Payload,
|
||||
"CUSTOM",
|
||||
); err != nil {
|
||||
c.checkError("ios_custom", err)
|
||||
return fmt.Errorf("can't append to mobile custom batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error {
|
||||
if msg.Label == "" {
|
||||
return nil
|
||||
}
|
||||
if err := c.batches["ios_clicks"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Label,
|
||||
"TAP",
|
||||
); err != nil {
|
||||
c.checkError("ios_clicks", err)
|
||||
return fmt.Errorf("can't append to mobile clicks batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error {
|
||||
if msg.Label == "" {
|
||||
return nil
|
||||
}
|
||||
if err := c.batches["ios_swipes"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Label,
|
||||
nullableString(msg.Direction),
|
||||
"SWIPE",
|
||||
); err != nil {
|
||||
c.checkError("ios_clicks", err)
|
||||
return fmt.Errorf("can't append to mobile clicks batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error {
|
||||
if msg.Label == "" {
|
||||
return nil
|
||||
}
|
||||
if err := c.batches["ios_inputs"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Label,
|
||||
"INPUT",
|
||||
); err != nil {
|
||||
c.checkError("ios_inputs", err)
|
||||
return fmt.Errorf("can't append to mobile inputs batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error {
|
||||
urlMethod := url.EnsureMethod(msg.Method)
|
||||
if urlMethod == "" {
|
||||
return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
|
||||
}
|
||||
var request, response *string
|
||||
if savePayload {
|
||||
request = &msg.Request
|
||||
response = &msg.Response
|
||||
}
|
||||
if err := c.batches["ios_requests"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.Meta().Index,
|
||||
datetime(uint64(msg.Meta().Timestamp)),
|
||||
msg.URL,
|
||||
request,
|
||||
response,
|
||||
uint16(msg.Status),
|
||||
url.EnsureMethod(msg.Method),
|
||||
uint16(msg.Duration),
|
||||
msg.Status < 400,
|
||||
"REQUEST",
|
||||
); err != nil {
|
||||
c.checkError("ios_requests", err)
|
||||
return fmt.Errorf("can't append to mobile requests batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error {
|
||||
if err := c.batches["ios_crashes"].Append(
|
||||
session.SessionID,
|
||||
uint16(session.ProjectID),
|
||||
msg.MsgID(),
|
||||
datetime(msg.Timestamp),
|
||||
msg.Name,
|
||||
msg.Reason,
|
||||
msg.Stacktrace,
|
||||
"CRASH",
|
||||
); err != nil {
|
||||
c.checkError("ios_crashes", err)
|
||||
return fmt.Errorf("can't append to mobile crashges batch: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,20 +19,17 @@ type Conn struct {
|
|||
Pool pool.Pool
|
||||
batches *batch.BatchSet
|
||||
bulks *BulkSet
|
||||
chConn CH // hack for autocomplete inserts, TODO: rewrite
|
||||
chConn CH
|
||||
}
|
||||
|
||||
func (conn *Conn) SetClickHouse(ch CH) {
|
||||
conn.chConn = ch
|
||||
}
|
||||
|
||||
func NewConn(log logger.Logger, pool pool.Pool) *Conn {
|
||||
func NewConn(log logger.Logger, pool pool.Pool, ch CH) *Conn {
|
||||
if pool == nil {
|
||||
log.Fatal(context.Background(), "pg pool is empty")
|
||||
}
|
||||
return &Conn{
|
||||
log: log,
|
||||
Pool: pool,
|
||||
chConn: ch,
|
||||
bulks: NewBulkSet(log, pool),
|
||||
batches: batch.NewBatchSet(log, pool),
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ func (d *dataDogClient) FetchSessionData(credentials interface{}, sessionID uint
|
|||
// Not a struct, will try to parse as JSON string
|
||||
strCfg, ok := credentials.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
cfg = datadogConfig{}
|
||||
if site, ok := strCfg["site"].(string); ok {
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ func (d *dynatraceClient) FetchSessionData(credentials interface{}, sessionID ui
|
|||
if !ok {
|
||||
strCfg, ok := credentials.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
cfg = dynatraceConfig{}
|
||||
if val, ok := strCfg["environment"].(string); ok {
|
||||
|
|
|
|||
|
|
@ -5,7 +5,6 @@ import (
|
|||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/elastic/go-elasticsearch/v8"
|
||||
|
|
@ -29,7 +28,7 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI
|
|||
if !ok {
|
||||
strCfg, ok := credentials.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
cfg = elasticsearchConfig{}
|
||||
if val, ok := strCfg["url"].(string); ok {
|
||||
|
|
@ -55,7 +54,7 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI
|
|||
// Create Elasticsearch client
|
||||
es, err := elasticsearch.NewClient(clientCfg)
|
||||
if err != nil {
|
||||
log.Fatalf("Error creating the client: %s", err)
|
||||
return nil, fmt.Errorf("error creating the client: %s", err)
|
||||
}
|
||||
|
||||
var buf strings.Builder
|
||||
|
|
@ -79,17 +78,17 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI
|
|||
es.Search.WithTrackTotalHits(true),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("Error getting response: %s", err)
|
||||
return nil, fmt.Errorf("error getting response: %s", err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.IsError() {
|
||||
log.Fatalf("Error: %s", res.String())
|
||||
return nil, fmt.Errorf("error: %s", res.String())
|
||||
}
|
||||
|
||||
var r map[string]interface{}
|
||||
if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
|
||||
log.Fatalf("Error parsing the response body: %s", err)
|
||||
return nil, fmt.Errorf("error parsing the response body: %s", err)
|
||||
}
|
||||
if r["hits"] == nil {
|
||||
return nil, fmt.Errorf("no logs found")
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import (
|
|||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
|
@ -35,7 +34,7 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6
|
|||
if !ok {
|
||||
strCfg, ok := credentials.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
|
||||
return nil, fmt.Errorf("invalid credentials")
|
||||
}
|
||||
cfg = sentryConfig{}
|
||||
if val, ok := strCfg["organization_slug"].(string); ok {
|
||||
|
|
@ -62,7 +61,7 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6
|
|||
// Create a new request
|
||||
req, err := http.NewRequest("GET", requestUrl, nil)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create request: %v", err)
|
||||
return nil, fmt.Errorf("failed to create request: %v", err)
|
||||
}
|
||||
|
||||
// Add Authorization header
|
||||
|
|
@ -72,26 +71,26 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6
|
|||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to send request: %v", err)
|
||||
return nil, fmt.Errorf("failed to send request: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Check if the response status is OK
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
log.Fatalf("Failed to fetch logs, status code: %v", resp.StatusCode)
|
||||
return nil, fmt.Errorf("failed to fetch logs, status code: %v", resp.StatusCode)
|
||||
}
|
||||
|
||||
// Read the response body
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to read response body: %v", err)
|
||||
return nil, fmt.Errorf("failed to read response body: %v", err)
|
||||
}
|
||||
|
||||
// Parse the JSON response
|
||||
var events []SentryEvent
|
||||
err = json.Unmarshal(body, &events)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to parse JSON: %v", err)
|
||||
return nil, fmt.Errorf("failed to parse JSON: %v", err)
|
||||
}
|
||||
if events == nil || len(events) == 0 {
|
||||
return nil, fmt.Errorf("no logs found")
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ type Sessions interface {
|
|||
AddUnStarted(session *UnStartedSession) error
|
||||
AddCached(sessionID uint64, data map[string]string) error
|
||||
Get(sessionID uint64) (*Session, error)
|
||||
GetUpdated(sessionID uint64) (*Session, error)
|
||||
GetUpdated(sessionID uint64, keepInCache bool) (*Session, error)
|
||||
GetCached(sessionID uint64) (map[string]string, error)
|
||||
GetDuration(sessionID uint64) (uint64, error)
|
||||
UpdateDuration(sessionID uint64, timestamp uint64) (uint64, error)
|
||||
|
|
@ -104,11 +104,14 @@ func (s *sessionsImpl) Get(sessionID uint64) (*Session, error) {
|
|||
}
|
||||
|
||||
// Special method for clickhouse connector
|
||||
func (s *sessionsImpl) GetUpdated(sessionID uint64) (*Session, error) {
|
||||
func (s *sessionsImpl) GetUpdated(sessionID uint64, keepInCache bool) (*Session, error) {
|
||||
session, err := s.getFromDB(sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !keepInCache {
|
||||
return session, nil
|
||||
}
|
||||
if err := s.cache.Set(session); err != nil {
|
||||
ctx := context.WithValue(context.Background(), "sessionID", sessionID)
|
||||
s.log.Warn(ctx, "failed to cache session: %s", err)
|
||||
|
|
|
|||
|
|
@ -1,19 +1,19 @@
|
|||
package spot
|
||||
|
||||
import (
|
||||
"openreplay/backend/pkg/metrics/web"
|
||||
"openreplay/backend/pkg/server/tracer"
|
||||
"time"
|
||||
|
||||
"openreplay/backend/internal/config/spot"
|
||||
"openreplay/backend/pkg/db/postgres/pool"
|
||||
"openreplay/backend/pkg/flakeid"
|
||||
"openreplay/backend/pkg/logger"
|
||||
"openreplay/backend/pkg/metrics/web"
|
||||
"openreplay/backend/pkg/objectstorage/store"
|
||||
"openreplay/backend/pkg/server/api"
|
||||
"openreplay/backend/pkg/server/auth"
|
||||
"openreplay/backend/pkg/server/keys"
|
||||
"openreplay/backend/pkg/server/limiter"
|
||||
"openreplay/backend/pkg/server/tracer"
|
||||
spotAPI "openreplay/backend/pkg/spot/api"
|
||||
"openreplay/backend/pkg/spot/service"
|
||||
"openreplay/backend/pkg/spot/transcoder"
|
||||
|
|
|
|||
49
ee/api/.gitignore
vendored
|
|
@ -184,47 +184,35 @@ Pipfile.lock
|
|||
/build.sh
|
||||
/build_alerts.sh
|
||||
/build_crons.sh
|
||||
/chalicelib/core/alerts.py
|
||||
/chalicelib/core/announcements.py
|
||||
/chalicelib/core/assist.py
|
||||
/chalicelib/core/authorizers.py
|
||||
/chalicelib/core/autocomplete.py
|
||||
/chalicelib/core/autocomplete/*
|
||||
/chalicelib/core/canvas.py
|
||||
/chalicelib/core/collaboration_base.py
|
||||
/chalicelib/core/collaboration_msteams.py
|
||||
/chalicelib/core/collaboration_slack.py
|
||||
/chalicelib/core/collaborations/*
|
||||
/chalicelib/core/countries.py
|
||||
/chalicelib/core/metrics.py
|
||||
/chalicelib/core/custom_metrics.py
|
||||
/chalicelib/core/custom_metrics_predefined.py
|
||||
/chalicelib/core/dashboards.py
|
||||
/chalicelib/core/errors_favorite.py
|
||||
/chalicelib/core/events_mobile.py
|
||||
/chalicelib/core/feature_flags.py
|
||||
/chalicelib/core/funnels.py
|
||||
/chalicelib/core/integration_base.py
|
||||
/chalicelib/core/integration_base_issue.py
|
||||
/chalicelib/core/integration_github.py
|
||||
/chalicelib/core/integration_github_issue.py
|
||||
/chalicelib/core/integration_jira_cloud.py
|
||||
/chalicelib/core/integration_jira_cloud_issue.py
|
||||
/chalicelib/core/integrations_manager.py
|
||||
/chalicelib/core/issue_tracking/*.py
|
||||
/chalicelib/core/issues.py
|
||||
/chalicelib/core/jobs.py
|
||||
/chalicelib/core/log_tool_bugsnag.py
|
||||
/chalicelib/core/log_tool_cloudwatch.py
|
||||
/chalicelib/core/log_tool_datadog.py
|
||||
/chalicelib/core/log_tool_elasticsearch.py
|
||||
/chalicelib/core/log_tool_newrelic.py
|
||||
/chalicelib/core/log_tool_rollbar.py
|
||||
/chalicelib/core/log_tool_sentry.py
|
||||
/chalicelib/core/log_tool_stackdriver.py
|
||||
/chalicelib/core/log_tool_sumologic.py
|
||||
/chalicelib/core/log_tools/*.py
|
||||
/chalicelib/core/metadata.py
|
||||
/chalicelib/core/mobile.py
|
||||
/chalicelib/core/performance_event.py
|
||||
/chalicelib/core/saved_search.py
|
||||
/chalicelib/core/sessions.py
|
||||
/chalicelib/core/sessions_assignments.py
|
||||
/chalicelib/core/sessions_mobs.py
|
||||
/chalicelib/core/sessions/sessions.py
|
||||
/chalicelib/core/sessions/sessions_ch.py
|
||||
/chalicelib/core/sessions/sessions_assignments.py
|
||||
/chalicelib/core/sessions/sessions_metas.py
|
||||
/chalicelib/core/sessions/sessions_mobs.py
|
||||
/chalicelib/core/sessions/performance_event.py
|
||||
/chalicelib/core/sessions/unprocessed_sessions.py
|
||||
/chalicelib/core/significance.py
|
||||
/chalicelib/core/socket_ios.py
|
||||
/chalicelib/core/sourcemaps.py
|
||||
|
|
@ -276,6 +264,15 @@ Pipfile.lock
|
|||
/chalicelib/utils/or_cache/
|
||||
/routers/subs/health.py
|
||||
/chalicelib/core/spot.py
|
||||
/chalicelib/core/unprocessed_sessions.py
|
||||
/run-db_init-dev.sh
|
||||
/.dev/
|
||||
/chalicelib/core/product_anaytics2.py
|
||||
/chalicelib/utils/ch_client.py
|
||||
/chalicelib/utils/ch_client_exp.py
|
||||
/routers/subs/product_anaytics.py
|
||||
/chalicelib/core/alerts/__init__.py
|
||||
/chalicelib/core/alerts/alerts.py
|
||||
/chalicelib/core/alerts/alerts_processor.py
|
||||
/chalicelib/core/alerts/alerts_processor_ch.py
|
||||
/chalicelib/core/alerts/alerts_listener.py
|
||||
/chalicelib/core/alerts/modules/helpers.py
|
||||
|
|
|
|||
|
|
@ -4,26 +4,27 @@ verify_ssl = true
|
|||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
urllib3 = "==1.26.16"
|
||||
urllib3 = "==2.2.3"
|
||||
requests = "==2.32.3"
|
||||
boto3 = "==1.35.60"
|
||||
pyjwt = "==2.9.0"
|
||||
boto3 = "==1.35.76"
|
||||
pyjwt = "==2.10.1"
|
||||
psycopg2-binary = "==2.9.10"
|
||||
psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
|
||||
psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
|
||||
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
|
||||
clickhouse-connect = "==0.8.9"
|
||||
elasticsearch = "==8.16.0"
|
||||
jira = "==3.8.0"
|
||||
cachetools = "==5.5.0"
|
||||
fastapi = "==0.115.5"
|
||||
uvicorn = {extras = ["standard"], version = "==0.32.0"}
|
||||
fastapi = "==0.115.6"
|
||||
uvicorn = {extras = ["standard"], version = "==0.32.1"}
|
||||
gunicorn = "==23.0.0"
|
||||
python-decouple = "==3.8"
|
||||
pydantic = {extras = ["email"], version = "==2.9.2"}
|
||||
apscheduler = "==3.10.4"
|
||||
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
|
||||
pydantic = {extras = ["email"], version = "==2.10.3"}
|
||||
apscheduler = "==3.11.0"
|
||||
redis = "==5.2.1"
|
||||
python3-saml = "==1.16.0"
|
||||
python-multipart = "==0.0.17"
|
||||
redis = "==5.2.0"
|
||||
azure-storage-blob = "==12.23.1"
|
||||
azure-storage-blob = "==12.24.0"
|
||||
|
||||
[dev-packages]
|
||||
|
||||
|
|
|
|||
|
|
@ -17,11 +17,11 @@ from starlette.responses import StreamingResponse, JSONResponse
|
|||
from chalicelib.core import traces
|
||||
from chalicelib.utils import events_queue
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import pg_client
|
||||
from chalicelib.utils import pg_client, ch_client
|
||||
from crons import core_crons, ee_crons, core_dynamic_crons
|
||||
from routers import core, core_dynamic
|
||||
from routers import ee
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
|
||||
from routers.subs import v1_api_ee
|
||||
|
||||
if config("ENABLE_SSO", cast=bool, default=True):
|
||||
|
|
@ -48,6 +48,7 @@ async def lifespan(app: FastAPI):
|
|||
app.schedule = AsyncIOScheduler()
|
||||
app.queue_system = queue.Queue()
|
||||
await pg_client.init()
|
||||
await ch_client.init()
|
||||
await events_queue.init()
|
||||
app.schedule.start()
|
||||
|
||||
|
|
@ -149,6 +150,10 @@ app.include_router(spot.public_app)
|
|||
app.include_router(spot.app)
|
||||
app.include_router(spot.app_apikey)
|
||||
|
||||
app.include_router(product_anaytics.public_app)
|
||||
app.include_router(product_anaytics.app)
|
||||
app.include_router(product_anaytics.app_apikey)
|
||||
|
||||
if config("ENABLE_SSO", cast=bool, default=True):
|
||||
app.include_router(saml.public_app)
|
||||
app.include_router(saml.app)
|
||||
|
|
|
|||
|
|
@ -1,46 +1,33 @@
|
|||
from decouple import config
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
from decouple import config
|
||||
|
||||
from . import sessions as sessions_legacy
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental sessions search")
|
||||
from . import sessions_exp as sessions
|
||||
else:
|
||||
from . import sessions as sessions
|
||||
logger = logging.getLogger(__name__)
|
||||
from . import custom_metrics as custom_metrics_legacy
|
||||
from . import custom_metrics_ee as custom_metrics
|
||||
from . import metrics_ch as metrics
|
||||
from . import metrics as metrics_legacy
|
||||
|
||||
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental autocomplete")
|
||||
from . import autocomplete_exp as autocomplete
|
||||
logger.info(">>> Using experimental autocomplete")
|
||||
else:
|
||||
from . import autocomplete as autocomplete
|
||||
|
||||
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental error search")
|
||||
logger.info(">>> Using experimental error search")
|
||||
from . import errors as errors_legacy
|
||||
from . import errors_exp as errors
|
||||
|
||||
if config("EXP_ERRORS_GET", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental error get")
|
||||
logger.info(">>> Using experimental error get")
|
||||
else:
|
||||
from . import errors as errors
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental sessions search for metrics")
|
||||
|
||||
if config("EXP_ALERTS", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental alerts")
|
||||
from . import alerts_processor_exp as alerts_processor
|
||||
else:
|
||||
from . import alerts_processor as alerts_processor
|
||||
logger.info(">>> Using experimental sessions search for metrics")
|
||||
|
||||
if config("EXP_FUNNELS", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental funnels")
|
||||
if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from . import sessions as sessions_legacy
|
||||
|
||||
logger.info(">>> Using experimental funnels")
|
||||
from . import significance_exp as significance
|
||||
else:
|
||||
from . import significance as significance
|
||||
|
|
|
|||
16
ee/api/chalicelib/core/alerts/modules/__init__.py
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
from decouple import config
|
||||
|
||||
TENANT_ID = "tenant_id"
|
||||
if config("EXP_ALERTS", cast=bool, default=False):
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core.sessions import sessions
|
||||
else:
|
||||
from chalicelib.core.sessions import sessions_ch as sessions
|
||||
else:
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core.sessions import sessions_ch as sessions
|
||||
else:
|
||||
from chalicelib.core.sessions import sessions
|
||||
|
||||
|
||||
from . import helpers as alert_helpers
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
from chalicelib.utils import pg_client, helper
|
||||
|
||||
|
||||
def get_all_alerts():
|
||||
with pg_client.PostgresClient(long_query=True) as cur:
|
||||
query = """SELECT tenant_id,
|
||||
alert_id,
|
||||
projects.project_id,
|
||||
projects.name AS project_name,
|
||||
detection_method,
|
||||
query,
|
||||
options,
|
||||
(EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at,
|
||||
alerts.name,
|
||||
alerts.series_id,
|
||||
filter,
|
||||
change,
|
||||
COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count',
|
||||
query ->> 'left') AS series_name
|
||||
FROM public.alerts
|
||||
INNER JOIN projects USING (project_id)
|
||||
LEFT JOIN metric_series USING (series_id)
|
||||
LEFT JOIN metrics USING (metric_id)
|
||||
WHERE alerts.deleted_at ISNULL
|
||||
AND alerts.active
|
||||
AND projects.active
|
||||
AND projects.deleted_at ISNULL
|
||||
AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
|
||||
ORDER BY alerts.created_at;"""
|
||||
cur.execute(query=query)
|
||||
all_alerts = helper.list_to_camel_case(cur.fetchall())
|
||||
return all_alerts
|
||||
|
|
@ -1,242 +0,0 @@
|
|||
import decimal
|
||||
import logging
|
||||
|
||||
from decouple import config
|
||||
from pydantic_core._pydantic_core import ValidationError
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import alerts
|
||||
from chalicelib.core import alerts_listener
|
||||
from chalicelib.utils import pg_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
LeftToDb = {
|
||||
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
|
||||
schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
|
||||
schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(dom_building_time,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(response_time,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(first_paint_time,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(visually_complete,0))"},
|
||||
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
|
||||
"table": "public.sessions",
|
||||
"formula": "COUNT(DISTINCT session_id)",
|
||||
"condition": "errors_count > 0 AND duration>0"},
|
||||
schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
|
||||
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
|
||||
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
|
||||
schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
|
||||
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
|
||||
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
|
||||
}
|
||||
|
||||
# This is the frequency of execution for each threshold
|
||||
TimeInterval = {
|
||||
15: 3,
|
||||
30: 5,
|
||||
60: 10,
|
||||
120: 20,
|
||||
240: 30,
|
||||
1440: 60,
|
||||
}
|
||||
|
||||
|
||||
def can_check(a) -> bool:
|
||||
now = TimeUTC.now()
|
||||
|
||||
repetitionBase = a["options"]["currentPeriod"] \
|
||||
if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
|
||||
and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
|
||||
else a["options"]["previousPeriod"]
|
||||
|
||||
if TimeInterval.get(repetitionBase) is None:
|
||||
logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
|
||||
return False
|
||||
|
||||
return (a["options"]["renotifyInterval"] <= 0 or
|
||||
a["options"].get("lastNotification") is None or
|
||||
a["options"]["lastNotification"] <= 0 or
|
||||
((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
|
||||
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
|
||||
|
||||
|
||||
def Build(a):
|
||||
now = TimeUTC.now()
|
||||
params = {"project_id": a["projectId"], "now": now}
|
||||
full_args = {}
|
||||
j_s = True
|
||||
main_table = ""
|
||||
if a["seriesId"] is not None:
|
||||
a["filter"]["sort"] = "session_id"
|
||||
a["filter"]["order"] = schemas.SortOrderType.DESC
|
||||
a["filter"]["startDate"] = 0
|
||||
a["filter"]["endDate"] = TimeUTC.now()
|
||||
try:
|
||||
data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
|
||||
except ValidationError:
|
||||
logging.warning("Validation error for:")
|
||||
logging.warning(a["filter"])
|
||||
raise
|
||||
|
||||
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
|
||||
issue=None, project_id=a["projectId"], user_id=None,
|
||||
favorite_only=False)
|
||||
subQ = f"""SELECT COUNT(session_id) AS value
|
||||
{query_part}"""
|
||||
else:
|
||||
colDef = LeftToDb[a["query"]["left"]]
|
||||
subQ = f"""SELECT {colDef["formula"]} AS value
|
||||
FROM {colDef["table"]}
|
||||
WHERE project_id = %(project_id)s
|
||||
{"AND " + colDef["condition"] if colDef.get("condition") else ""}"""
|
||||
j_s = colDef.get("joinSessions", True)
|
||||
main_table = colDef["table"]
|
||||
is_ss = main_table == "public.sessions"
|
||||
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
|
||||
|
||||
if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
|
||||
if a["seriesId"] is not None:
|
||||
q += f""" FROM ({subQ}) AS stat"""
|
||||
else:
|
||||
q += f""" FROM ({subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
|
||||
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat"""
|
||||
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
|
||||
else:
|
||||
if a["change"] == schemas.AlertDetectionType.CHANGE:
|
||||
if a["seriesId"] is not None:
|
||||
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
|
||||
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
|
||||
q += f" FROM ( {sub1} ) AS stat"
|
||||
params = {**params, **full_args,
|
||||
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
|
||||
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
|
||||
else:
|
||||
sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
|
||||
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
|
||||
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
|
||||
{"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
|
||||
params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
|
||||
q += f" FROM ( {sub1} ) AS stat"
|
||||
|
||||
else:
|
||||
if a["seriesId"] is not None:
|
||||
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
|
||||
sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
|
||||
q += f" FROM ({sub1}) AS stat"
|
||||
params = {**params, **full_args,
|
||||
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
|
||||
"timestamp_sub2": TimeUTC.now() \
|
||||
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
|
||||
* 60 * 1000}
|
||||
else:
|
||||
sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
|
||||
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
|
||||
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
|
||||
{"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
|
||||
params["timestamp_sub2"] = TimeUTC.now() \
|
||||
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
|
||||
sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
|
||||
q += f" FROM ({sub1}) AS stat"
|
||||
|
||||
return q, params
|
||||
|
||||
|
||||
def process():
|
||||
notifications = []
|
||||
all_alerts = alerts_listener.get_all_alerts()
|
||||
with pg_client.PostgresClient() as cur:
|
||||
for alert in all_alerts:
|
||||
if can_check(alert):
|
||||
query, params = Build(alert)
|
||||
try:
|
||||
query = cur.mogrify(query, params)
|
||||
except Exception as e:
|
||||
logging.error(
|
||||
f"!!!Error while building alert query for alertId:{alert['alertId']} name: {alert['name']}")
|
||||
logging.error(e)
|
||||
continue
|
||||
logging.debug(alert)
|
||||
logging.debug(query)
|
||||
try:
|
||||
cur.execute(query)
|
||||
result = cur.fetchone()
|
||||
if result["valid"]:
|
||||
logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
|
||||
notifications.append(generate_notification(alert, result))
|
||||
except Exception as e:
|
||||
logging.error(
|
||||
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
|
||||
logging.error(query)
|
||||
logging.error(e)
|
||||
cur = cur.recreate(rollback=True)
|
||||
if len(notifications) > 0:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.alerts
|
||||
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
|
||||
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
|
||||
if len(notifications) > 0:
|
||||
alerts.process_notifications(notifications)
|
||||
|
||||
|
||||
def __format_value(x):
|
||||
if x % 1 == 0:
|
||||
x = int(x)
|
||||
else:
|
||||
x = round(x, 2)
|
||||
return f"{x:,}"
|
||||
|
||||
|
||||
def generate_notification(alert, result):
|
||||
left = __format_value(result['value'])
|
||||
right = __format_value(alert['query']['right'])
|
||||
return {
|
||||
"alertId": alert["alertId"],
|
||||
"tenantId": alert["tenantId"],
|
||||
"title": alert["name"],
|
||||
"description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).",
|
||||
"buttonText": "Check metrics for more details",
|
||||
"buttonUrl": f"/{alert['projectId']}/metrics",
|
||||
"imageUrl": None,
|
||||
"projectId": alert["projectId"],
|
||||
"projectName": alert["projectName"],
|
||||
"options": {"source": "ALERT", "sourceId": alert["alertId"],
|
||||
"sourceMeta": alert["detectionMethod"],
|
||||
"message": alert["options"]["message"], "projectId": alert["projectId"],
|
||||
"data": {"title": alert["name"],
|
||||
"limitValue": alert["query"]["right"],
|
||||
"actualValue": float(result["value"]) \
|
||||
if isinstance(result["value"], decimal.Decimal) \
|
||||
else result["value"],
|
||||
"operator": alert["query"]["operator"],
|
||||
"trigger": alert["query"]["left"],
|
||||
"alertId": alert["alertId"],
|
||||
"detectionMethod": alert["detectionMethod"],
|
||||
"currentPeriod": alert["options"]["currentPeriod"],
|
||||
"previousPeriod": alert["options"]["previousPeriod"],
|
||||
"createdAt": TimeUTC.now()}},
|
||||
}
|
||||
|
|
@ -1,703 +0,0 @@
|
|||
import json
|
||||
import logging
|
||||
|
||||
from decouple import config
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import funnels, issues, heatmaps, sessions_insights, sessions_mobs, sessions_favorite, \
|
||||
product_analytics, custom_metrics_predefined
|
||||
from chalicelib.utils import helper, pg_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.storage import extra
|
||||
|
||||
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||
logging.info(">>> Using experimental error search")
|
||||
from . import errors_exp as errors
|
||||
else:
|
||||
from . import errors as errors
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
|
||||
from chalicelib.core import sessions
|
||||
else:
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# TODO: refactor this to split
|
||||
# timeseries /
|
||||
# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
|
||||
# remove "table of" calls from this function
|
||||
def __try_live(project_id, data: schemas.CardSchema):
|
||||
results = []
|
||||
for i, s in enumerate(data.series):
|
||||
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
|
||||
view_type=data.view_type, metric_type=data.metric_type,
|
||||
metric_of=data.metric_of, metric_value=data.metric_value))
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def __get_table_of_series(project_id, data: schemas.CardSchema):
|
||||
results = []
|
||||
for i, s in enumerate(data.series):
|
||||
results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density,
|
||||
metric_of=data.metric_of, metric_value=data.metric_value,
|
||||
metric_format=data.metric_format))
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel, user_id: int = None):
|
||||
if len(data.series) == 0:
|
||||
return {
|
||||
"stages": [],
|
||||
"totalDropDueToIssues": 0
|
||||
}
|
||||
|
||||
# return funnels.get_top_insights_on_the_fly_widget(project_id=project_id,
|
||||
# data=data.series[0].filter,
|
||||
# metric_format=data.metric_format)
|
||||
return funnels.get_simple_funnel(project=project,
|
||||
data=data.series[0].filter,
|
||||
metric_format=data.metric_format)
|
||||
|
||||
|
||||
def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
|
||||
if len(data.series) == 0:
|
||||
return {
|
||||
"total": 0,
|
||||
"errors": []
|
||||
}
|
||||
return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id)
|
||||
|
||||
|
||||
def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema):
|
||||
if len(data.series) == 0:
|
||||
logger.debug("empty series")
|
||||
return {
|
||||
"total": 0,
|
||||
"sessions": []
|
||||
}
|
||||
return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id)
|
||||
|
||||
|
||||
def __get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.CardHeatMap,
|
||||
include_mobs: bool = True):
|
||||
if len(data.series) == 0:
|
||||
return None
|
||||
data.series[0].filter.filters += data.series[0].filter.events
|
||||
data.series[0].filter.events = []
|
||||
return heatmaps.search_short_session(project_id=project.project_id, user_id=user_id,
|
||||
data=schemas.HeatMapSessionsSearch(
|
||||
**data.series[0].filter.model_dump()),
|
||||
include_mobs=include_mobs)
|
||||
|
||||
|
||||
# EE only
|
||||
def __get_insights_chart(project: schemas.ProjectContext, data: schemas.CardInsights, user_id: int = None):
|
||||
return sessions_insights.fetch_selected(project_id=project.project_id,
|
||||
data=schemas.GetInsightsSchema(startTimestamp=data.startTimestamp,
|
||||
endTimestamp=data.endTimestamp,
|
||||
metricValue=data.metric_value,
|
||||
series=data.series))
|
||||
|
||||
|
||||
def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: int, data: schemas.CardPathAnalysis):
|
||||
if len(data.series) == 0:
|
||||
data.series.append(
|
||||
schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
|
||||
elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
|
||||
data.series[0].filter = schemas.PathAnalysisSchema()
|
||||
|
||||
return product_analytics.path_analysis(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_timeseries_chart(project: schemas.ProjectContext, data: schemas.CardTimeSeries, user_id: int = None):
|
||||
series_charts = __try_live(project_id=project.project_id, data=data)
|
||||
results = [{}] * len(series_charts[0])
|
||||
for i in range(len(results)):
|
||||
for j, series_chart in enumerate(series_charts):
|
||||
results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
|
||||
data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
|
||||
return results
|
||||
|
||||
|
||||
def not_supported(**args):
|
||||
raise Exception("not supported")
|
||||
|
||||
|
||||
def __get_table_of_user_ids(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_sessions(project: schemas.ProjectContext, data: schemas.CardTable, user_id):
|
||||
return __get_sessions_list(project=project, user_id=user_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_errors(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int):
|
||||
return __get_errors_list(project=project, user_id=user_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_issues(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_browsers(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_devises(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_countries(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_urls(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_referrers(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_of_requests(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None):
|
||||
return __get_table_of_series(project_id=project.project_id, data=data)
|
||||
|
||||
|
||||
def __get_table_chart(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int):
|
||||
supported = {
|
||||
schemas.MetricOfTable.SESSIONS: __get_table_of_sessions,
|
||||
schemas.MetricOfTable.ERRORS: __get_table_of_errors,
|
||||
schemas.MetricOfTable.USER_ID: __get_table_of_user_ids,
|
||||
schemas.MetricOfTable.ISSUES: __get_table_of_issues,
|
||||
schemas.MetricOfTable.USER_BROWSER: __get_table_of_browsers,
|
||||
schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises,
|
||||
schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries,
|
||||
schemas.MetricOfTable.VISITED_URL: __get_table_of_urls,
|
||||
schemas.MetricOfTable.REFERRER: __get_table_of_referrers,
|
||||
schemas.MetricOfTable.FETCH: __get_table_of_requests
|
||||
}
|
||||
return supported.get(data.metric_of, not_supported)(project=project, data=data, user_id=user_id)
|
||||
|
||||
|
||||
def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id: int):
|
||||
if data.is_predefined:
|
||||
return custom_metrics_predefined.get_metric(key=data.metric_of,
|
||||
project_id=project.project_id,
|
||||
data=data.model_dump())
|
||||
|
||||
supported = {
|
||||
schemas.MetricType.TIMESERIES: __get_timeseries_chart,
|
||||
schemas.MetricType.TABLE: __get_table_chart,
|
||||
schemas.MetricType.HEAT_MAP: __get_heat_map_chart,
|
||||
schemas.MetricType.FUNNEL: __get_funnel_chart,
|
||||
schemas.MetricType.INSIGHTS: __get_insights_chart,
|
||||
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart
|
||||
}
|
||||
return supported.get(data.metric_type, not_supported)(project=project, data=data, user_id=user_id)
|
||||
|
||||
|
||||
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
|
||||
# No need for this because UI is sending the full payload
|
||||
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
# if card is None:
|
||||
# return None
|
||||
# metric: schemas.CardSchema = schemas.CardSchema(**card)
|
||||
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
|
||||
return None
|
||||
results = []
|
||||
for s in data.series:
|
||||
results.append({"seriesId": s.series_id, "seriesName": s.name,
|
||||
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
|
||||
results = []
|
||||
if len(data.series) == 0:
|
||||
return results
|
||||
for s in data.series:
|
||||
if len(data.filters) > 0:
|
||||
s.filter.filters += data.filters
|
||||
s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))
|
||||
|
||||
results.append({"seriesId": None, "seriesName": s.name,
|
||||
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.CardSchema):
|
||||
if data.is_predefined:
|
||||
return not_supported()
|
||||
if data.metric_of == schemas.MetricOfTable.ISSUES:
|
||||
return __get_table_of_issues(project=project, user_id=user_id, data=data)
|
||||
supported = {
|
||||
schemas.MetricType.TIMESERIES: not_supported,
|
||||
schemas.MetricType.TABLE: not_supported,
|
||||
schemas.MetricType.HEAT_MAP: not_supported,
|
||||
schemas.MetricType.INSIGHTS: not_supported,
|
||||
schemas.MetricType.PATH_ANALYSIS: not_supported,
|
||||
}
|
||||
return supported.get(data.metric_type, not_supported)()
|
||||
|
||||
|
||||
def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
|
||||
r = {"start_point": [s.model_dump() for s in data.start_point],
|
||||
"start_type": data.start_type,
|
||||
"excludes": [e.model_dump() for e in data.excludes],
|
||||
"hideExcess": data.hide_excess}
|
||||
return r
|
||||
|
||||
|
||||


def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False):
    with pg_client.PostgresClient() as cur:
        session_data = None
        if data.metric_type == schemas.MetricType.HEAT_MAP:
            if data.session_id is not None:
                session_data = {"sessionId": data.session_id}
            else:
                session_data = __get_heat_map_chart(project=project, user_id=user_id,
                                                    data=data, include_mobs=False)
                if session_data is not None:
                    session_data = {"sessionId": session_data["sessionId"]}

            if session_data is not None:
                # for EE only
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=session_data["sessionId"])  # To support old sessions
                tag = config('RETENTION_L_VALUE', default='vault')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))

        _data = {"session_data": json.dumps(session_data) if session_data is not None else None}
        for i, s in enumerate(data.series):
            for k in s.model_dump().keys():
                _data[f"{k}_{i}"] = s.__getattribute__(k)
            _data[f"index_{i}"] = i
            _data[f"filter_{i}"] = s.filter.json()
        series_len = len(data.series)
        params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data,
                  "default_config": json.dumps(data.default_config.model_dump()), "card_info": None}
        if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
            params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))

        query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                        view_type, metric_type, metric_of, metric_value,
                                        metric_format, default_config, thumbnail, data,
                                        card_info)
                   VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                           %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
                           %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s,
                           %(card_info)s)
                   RETURNING metric_id"""
        if len(data.series) > 0:
            query = f"""WITH m AS ({query})
                        INSERT INTO metric_series(metric_id, index, name, filter)
                        VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
                                          for i in range(series_len)])}
                        RETURNING metric_id;"""

        query = cur.mogrify(query, params)
        cur.execute(query)
        r = cur.fetchone()
        if dashboard:
            return r["metric_id"]
        return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)}
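
# Parameter layout sketch for the series INSERT above: each series is flattened
# into index-suffixed keys so the generated VALUES clause can reference them by
# position. With two series the params dict roughly contains (values illustrative):
#
#   {"index_0": 0, "name_0": "series 1", "filter_0": "{...}",
#    "index_1": 1, "name_1": "series 2", "filter_1": "{...}", ...}
#
# which pairs with the generated clause:
#
#   VALUES ((SELECT metric_id FROM m), %(index_0)s, %(name_0)s, %(filter_0)s::jsonb),
#          ((SELECT metric_id FROM m), %(index_1)s, %(name_1)s, %(filter_1)s::jsonb)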


def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
    metric: dict = get_card(metric_id=metric_id, project_id=project_id,
                            user_id=user_id, flatten=False, include_data=True)
    if metric is None:
        return None
    series_ids = [r["seriesId"] for r in metric["series"]]
    n_series = []
    d_series_ids = []
    u_series = []
    u_series_ids = []
    params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
              "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
              "metric_type": data.metric_type, "metric_of": data.metric_of,
              "metric_value": data.metric_value, "metric_format": data.metric_format,
              "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail}
    for i, s in enumerate(data.series):
        prefix = "u_"
        if s.index is None:
            s.index = i
        if s.series_id is None or s.series_id not in series_ids:
            n_series.append({"i": i, "s": s})
            prefix = "n_"
        else:
            u_series.append({"i": i, "s": s})
            u_series_ids.append(s.series_id)
        ns = s.model_dump()
        for k in ns.keys():
            if k == "filter":
                ns[k] = json.dumps(ns[k])
            params[f"{prefix}{k}_{i}"] = ns[k]
    for i in series_ids:
        if i not in u_series_ids:
            d_series_ids.append(i)
    params["d_series_ids"] = tuple(d_series_ids)
    params["card_info"] = None
    params["session_data"] = json.dumps(metric["data"])
    if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
        params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
    elif data.metric_type == schemas.MetricType.HEAT_MAP:
        if data.session_id is not None:
            params["session_data"] = json.dumps({"sessionId": data.session_id})
        elif metric.get("data") and metric["data"].get("sessionId"):
            params["session_data"] = json.dumps({"sessionId": metric["data"]["sessionId"]})

    with pg_client.PostgresClient() as cur:
        sub_queries = []
        if len(n_series) > 0:
            sub_queries.append(f"""\
                n AS (INSERT INTO metric_series (metric_id, index, name, filter)
                      VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
                                        for s in n_series])}
                      RETURNING 1)""")
        if len(u_series) > 0:
            sub_queries.append(f"""\
                u AS (UPDATE metric_series
                      SET name=series.name,
                          filter=series.filter,
                          index=series.index
                      FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
                                              for s in u_series])}) AS series(series_id, index, name, filter)
                      WHERE metric_series.metric_id =%(metric_id)s AND metric_series.series_id=series.series_id
                      RETURNING 1)""")
        if len(d_series_ids) > 0:
            sub_queries.append("""\
                d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s
                      RETURNING 1)""")
        query = cur.mogrify(f"""\
            {"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
            UPDATE metrics
            SET name = %(name)s, is_public= %(is_public)s,
                view_type= %(view_type)s, metric_type= %(metric_type)s,
                metric_of= %(metric_of)s, metric_value= %(metric_value)s,
                metric_format= %(metric_format)s,
                edited_at = timezone('utc'::text, now()),
                default_config = %(config)s,
                thumbnail = %(thumbnail)s,
                card_info = %(card_info)s,
                data = %(session_data)s
            WHERE metric_id = %(metric_id)s
              AND project_id = %(project_id)s
              AND (user_id = %(user_id)s OR is_public)
            RETURNING metric_id;""", params)
        cur.execute(query)
    return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
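
# Series reconciliation sketch for update_card: submitted series are split into
# inserts (n_*), updates (u_*) and deletions (d_series_ids) by comparing each
# series_id against what is already stored. Roughly (ids illustrative):
#
#   stored ids:    [10, 11, 12]
#   submitted ids: [10, 12, None]  ->  u_series: 10, 12
#                                      n_series: the entry with series_id None
#                                      d_series_ids: (11,)
#
# The three CTEs (n, u, d) then run together with the metrics UPDATE in one statement.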


def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
    constraints = ["metrics.project_id = %(project_id)s",
                   "metrics.deleted_at ISNULL"]
    params = {"project_id": project_id, "user_id": user_id,
              "offset": (data.page - 1) * data.limit,
              "limit": data.limit}
    if data.mine_only:
        constraints.append("user_id = %(user_id)s")
    else:
        constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
    if data.shared_only:
        constraints.append("is_public")

    if data.query is not None and len(data.query) > 0:
        constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
        params["query"] = helper.values_for_operator(value=data.query,
                                                     op=schemas.SearchEventOperator.CONTAINS)
    with pg_client.PostgresClient() as cur:
        sub_join = ""
        if include_series:
            sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                             FROM metric_series
                                             WHERE metric_series.metric_id = metrics.metric_id
                                               AND metric_series.deleted_at ISNULL
                          ) AS metric_series ON (TRUE)"""
        query = cur.mogrify(
            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
                       metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
                       dashboards, owner_email, owner_name, default_config AS config, thumbnail
                FROM metrics
                         {sub_join}
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                            FROM (SELECT DISTINCT dashboard_id, name, is_public
                                                  FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
                                                  WHERE deleted_at ISNULL
                                                    AND dashboard_widgets.metric_id = metrics.metric_id
                                                    AND project_id = %(project_id)s
                                                    AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
                         ) AS connected_dashboards ON (TRUE)
                         LEFT JOIN LATERAL (SELECT email AS owner_email, name AS owner_name
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id
                         ) AS owner ON (TRUE)
                WHERE {" AND ".join(constraints)}
                ORDER BY created_at {data.order.value}
                LIMIT %(limit)s OFFSET %(offset)s;""", params)
        logger.debug("---------")
        logger.debug(query)
        logger.debug("---------")
        cur.execute(query)
        rows = cur.fetchall()
        if include_series:
            for r in rows:
                for s in r["series"]:
                    s["filter"] = helper.old_search_payload_to_flat(s["filter"])
        else:
            for r in rows:
                r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
                r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
        rows = helper.list_to_camel_case(rows)
    return rows


def get_all(project_id, user_id):
    default_search = schemas.SearchCardsSchema()
    rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
    result = rows
    while len(rows) == default_search.limit:
        default_search.page += 1
        rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
        result += rows

    return result
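
# Pagination sketch: get_all keeps requesting pages of `default_search.limit`
# rows until search_all returns a short page. For example, with a page size of 9
# and 20 stored cards:
#
#   page 1 -> 9 rows, page 2 -> 9 rows, page 3 -> 2 rows  (loop stops)
#
# The actual page size is whatever default schemas.SearchCardsSchema defines;
# the value 9 above is only illustrative.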


def delete_card(project_id, metric_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.metrics
                SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND metric_id = %(metric_id)s
                  AND (user_id = %(user_id)s OR is_public)
                RETURNING data;""",
                        {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
        )
        # for EE only
        row = cur.fetchone()
        if row:
            if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]):
                keys = sessions_mobs. \
                    __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"])
                keys += sessions_mobs. \
                    __get_mob_keys_deprecated(session_id=row["data"]["sessionId"])  # To support old sessions
                tag = config('RETENTION_D_VALUE', default='default')
                for k in keys:
                    try:
                        extra.tag_session(file_key=k, tag_value=tag)
                    except Exception as e:
                        logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap")
                        logger.error(str(e))
    return {"state": "success"}


def __get_path_analysis_attributes(row):
    card_info = row.pop("cardInfo")
    row["excludes"] = card_info.get("excludes", [])
    row["startPoint"] = card_info.get("startPoint", [])
    row["startType"] = card_info.get("startType", "start")
    row["hideExcess"] = card_info.get("hideExcess", False)
    return row


def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
                       view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
                       default_config AS config, series, dashboards, owner_email, card_info
                       {',data' if include_data else ''}
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
                                            WHERE metric_series.metric_id = metrics.metric_id
                                              AND metric_series.deleted_at ISNULL
                         ) AS metric_series ON (TRUE)
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                            FROM (SELECT dashboard_id, name, is_public
                                                  FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
                                                  WHERE deleted_at ISNULL
                                                    AND project_id = %(project_id)s
                                                    AND ((dashboards.user_id = %(user_id)s OR is_public))
                                                    AND metric_id = %(metric_id)s) AS connected_dashboards
                         ) AS connected_dashboards ON (TRUE)
                         LEFT JOIN LATERAL (SELECT email AS owner_email
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id
                         ) AS owner ON (TRUE)
                WHERE metrics.project_id = %(project_id)s
                  AND metrics.deleted_at ISNULL
                  AND (metrics.user_id = %(user_id)s OR metrics.is_public)
                  AND metrics.metric_id = %(metric_id)s
                ORDER BY created_at;""",
            {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        row = cur.fetchone()
        if row is None:
            return None
        row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
        row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
        if flatten:
            for s in row["series"]:
                s["filter"] = helper.old_search_payload_to_flat(s["filter"])
    row = helper.dict_to_camel_case(row)
    if row["metricType"] == schemas.MetricType.PATH_ANALYSIS:
        row = __get_path_analysis_attributes(row=row)
    return row


def get_series_for_alert(project_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT series_id AS value,
                          metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
                          'count' AS unit,
                          FALSE AS predefined,
                          metric_id,
                          series_id
                   FROM metric_series
                            INNER JOIN metrics USING (metric_id)
                   WHERE metrics.deleted_at ISNULL
                     AND metrics.project_id = %(project_id)s
                     AND metrics.metric_type = 'timeseries'
                     AND (user_id = %(user_id)s OR is_public)
                   ORDER BY name;""",
                {"project_id": project_id, "user_id": user_id}
            )
        )
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)


def change_state(project_id, metric_id, user_id, status):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.metrics
                SET active = %(status)s
                WHERE metric_id = %(metric_id)s
                  AND (user_id = %(user_id)s OR is_public);""",
                        {"metric_id": metric_id, "status": status, "user_id": user_id})
        )
    return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)


def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
                                 data: schemas.CardSessionsSchema
                                 # , range_value=None, start_date=None, end_date=None
                                 ):
    # No need for this because UI is sending the full payload
    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    # if card is None:
    #     return None
    # metric: schemas.CardSchema = schemas.CardSchema(**card)
    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
    # if metric is None:
    #     return None
    if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
        return None
    for s in data.series:
        s.filter.startTimestamp = data.startTimestamp
        s.filter.endTimestamp = data.endTimestamp
        s.filter.limit = data.limit
        s.filter.page = data.page
        issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
        issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
        issue = None
        for i in issues_list:
            if i.get("issueId", "") == issue_id:
                issue = i
                break
        if issue is None:
            issue = issues.get(project_id=project_id, issue_id=issue_id)
            if issue is not None:
                issue = {**issue,
                         "affectedSessions": 0,
                         "affectedUsers": 0,
                         "conversionImpact": 0,
                         "lostConversions": 0,
                         "unaffectedSessions": 0}
        return {"seriesId": s.series_id, "seriesName": s.name,
                "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
                                                     issue=issue, data=s.filter)
                if issue is not None else {"total": 0, "sessions": []},
                "issue": issue}
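
# Issue lookup sketch: the requested issue_id is first searched among the funnel's
# on-the-fly issues (significant + insignificant). If it is not part of the funnel
# result, the issue is fetched directly and padded with zeroed impact metrics,
# roughly (shape illustrative):
#
#   {"issueId": "<issue_id>", ..., "affectedSessions": 0, "conversionImpact": 0}
#
# Note that the function returns from inside the loop, so only the first series
# of the payload is considered.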


def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema):
    raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True)

    if raw_metric is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
    raw_metric["startTimestamp"] = data.startTimestamp
    raw_metric["endTimestamp"] = data.endTimestamp
    raw_metric["limit"] = data.limit
    raw_metric["density"] = data.density
    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)

    if metric.is_predefined:
        return custom_metrics_predefined.get_metric(key=metric.metric_of,
                                                    project_id=project.project_id,
                                                    data=data.model_dump())
    elif metric.metric_type == schemas.MetricType.HEAT_MAP:
        if raw_metric["data"] and raw_metric["data"].get("sessionId"):
            return heatmaps.get_selected_session(project_id=project.project_id,
                                                 session_id=raw_metric["data"]["sessionId"])
        else:
            return heatmaps.search_short_session(project_id=project.project_id,
                                                 data=schemas.HeatMapSessionsSearch(**metric.model_dump()),
                                                 user_id=user_id)

    return get_chart(project=project, data=metric, user_id=user_id)
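
# Flow sketch for make_chart_from_card (field values are illustrative and depend
# on what schemas.CardSessionsSchema actually requires):
#
#   payload = schemas.CardSessionsSchema(startTimestamp=..., endTimestamp=...,
#                                        limit=..., density=..., series=[])
#   make_chart_from_card(project=project_context, user_id=current_user_id,
#                        metric_id=42, data=payload)
#
# The saved card is re-validated as a CardSchema with the requested time range and
# density, then routed to the predefined / heat-map handlers or to get_chart() above.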


def card_exists(metric_id, project_id, user_id) -> bool:
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT 1
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
                                            FROM (SELECT dashboard_id, name, is_public
                                                  FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
                                                  WHERE deleted_at ISNULL
                                                    AND project_id = %(project_id)s
                                                    AND ((dashboards.user_id = %(user_id)s OR is_public))
                                                    AND metric_id = %(metric_id)s) AS connected_dashboards
                         ) AS connected_dashboards ON (TRUE)
                         LEFT JOIN LATERAL (SELECT email AS owner_email
                                            FROM users
                                            WHERE deleted_at ISNULL
                                              AND users.user_id = metrics.user_id
                         ) AS owner ON (TRUE)
                WHERE metrics.project_id = %(project_id)s
                  AND metrics.deleted_at ISNULL
                  AND (metrics.user_id = %(user_id)s OR metrics.is_public)
                  AND metrics.metric_id = %(metric_id)s
                ORDER BY created_at;""",
            {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
        )
        cur.execute(query)
        row = cur.fetchone()
    return row is not None