Merge branch 'api-v1.9.5' into dev

Taha Yassine Kraiem 2023-01-20 14:41:45 +01:00
commit b620eb5e10
104 changed files with 5165 additions and 4851 deletions

api/.gitignore vendored
View file

@@ -84,6 +84,7 @@ wheels/
*.egg
MANIFEST
Pipfile
Pipfile.lock
# PyInstaller
# Usually these files are written by a python script from a template

View file

@@ -1,4 +1,4 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ARG GIT_SHA

View file

@@ -1,4 +1,4 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ARG GIT_SHA

View file

@@ -1,29 +0,0 @@
FROM python:3.9.12-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work
COPY . .
COPY ../utilities ./utilities
RUN rm entrypoint.sh && rm .chalice/config.json
RUN mv entrypoint.bundle.sh entrypoint.sh && mv .chalice/config.bundle.json .chalice/config.json
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/* && \
cd utilities && \
npm install
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
RUN adduser -u 1001 openreplay -D
USER 1001
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh

View file

@@ -12,7 +12,7 @@ from chalicelib.utils import pg_client
from routers import core, core_dynamic
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.subs import dashboard, insights, metrics, v1_api
from routers.subs import insights, metrics, v1_api
app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
app.add_middleware(GZipMiddleware, minimum_size=1000)
@@ -48,7 +48,6 @@ app.include_router(core.app_apikey)
app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(dashboard.app)
app.include_router(metrics.app)
app.include_router(insights.app)
app.include_router(v1_api.app_apikey)

View file

@@ -1,9 +1,14 @@
import json
import logging
import time
from datetime import datetime
from decouple import config
import schemas
from chalicelib.core import notifications, slack, webhook
from chalicelib.core import notifications, webhook
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -95,7 +100,7 @@ def process_notifications(data):
for c in n["options"].pop("message"):
if c["type"] not in full:
full[c["type"]] = []
if c["type"] in ["slack", "email"]:
if c["type"] in ["slack", "msteams", "email"]:
full[c["type"]].append({
"notification": n,
"destination": c["value"]
@@ -107,13 +112,21 @@ def process_notifications(data):
for t in full.keys():
for i in range(0, len(full[t]), BATCH_SIZE):
notifications_list = full[t][i:i + BATCH_SIZE]
if notifications_list is None or len(notifications_list) == 0:
break
if t == "slack":
try:
slack.send_batch(notifications_list=notifications_list)
send_to_slack_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending slack notifications batch")
logging.error(str(e))
elif t == "msteams":
try:
send_to_msteams_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending msteams notifications batch")
logging.error(str(e))
elif t == "email":
try:
send_by_email_batch(notifications_list=notifications_list)
@@ -149,16 +162,60 @@ def send_by_email_batch(notifications_list):
time.sleep(1)
def send_to_slack_batch(notifications_list):
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \
+ f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
"title": n["notification"]["title"],
"title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()})
for batch in webhookId_map.keys():
Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])
def send_to_msteams_batch(notifications_list):
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
link = f"[{n['notification']['buttonText']}]({config('SITE_URL')}{n['notification']['buttonUrl']})"
webhookId_map[n.get("destination")]["batch"].append({"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": n["notification"]["title"],
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": n["notification"]["description"],
"wrap": True},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]})
for batch in webhookId_map.keys():
MSTeams.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])
def delete(project_id, alert_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.alerts
SET
deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE
alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
cur.mogrify(""" UPDATE public.alerts
SET deleted_at = timezone('utc'::text, now()),
active = FALSE
WHERE alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
{"alert_id": alert_id, "project_id": project_id})
)
return {"data": {"state": "success"}}

View file

@@ -15,7 +15,7 @@ def jwt_authorizer(token):
token[1],
config("jwt_secret"),
algorithms=config("jwt_algorithm"),
audience=[ f"front:{helper.get_stage_name()}"]
audience=[f"front:{helper.get_stage_name()}"]
)
except jwt.ExpiredSignatureError:
print("! JWT Expired signature")
@@ -37,12 +37,16 @@ def jwt_context(context):
}
def get_jwt_exp(iat):
return iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000
def generate_jwt(id, tenant_id, iat, aud):
token = jwt.encode(
payload={
"userId": id,
"tenantId": tenant_id,
"exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
"exp": get_jwt_exp(iat),
"iss": config("JWT_ISSUER"),
"iat": iat // 1000,
"aud": aud

View file

@@ -1,5 +1,5 @@
import schemas
from chalicelib.core import countries
from chalicelib.core import countries, events, metadata
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.event_filter_definition import Event
@@ -36,6 +36,7 @@ def __get_autocomplete_table(value, project_id):
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
@@ -43,6 +44,7 @@ def __get_autocomplete_table(value, project_id):
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";",
@@ -70,7 +72,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value IN %(value)s
ORDER BY value"""
@@ -79,7 +81,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
@@ -88,7 +90,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
@@ -96,7 +98,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
@@ -122,6 +124,8 @@ def __generic_autocomplete_metas(typename):
if typename == schemas.FilterType.user_country:
params["value"] = tuple(countries.get_country_code_autocomplete(text))
if len(params["value"]) == 0:
return []
query = cur.mogrify(__generic_query(typename, value_length=len(text)), params)
cur.execute(query)
@@ -129,3 +133,194 @@ def __generic_autocomplete_metas(typename):
return rows
return f
def __errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
def __search_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__errors_query(source,
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
return results
def __search_errors_ios(project_id, value, key=None, source=None):
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.ERROR_IOS.ui_type}' AS type
FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
def __search_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
return []
sub_from = []
if key is not None:
meta_keys = {key: meta_keys[key]}
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
if len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
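Throughout this file the queries bind two patterns per search: `%(svalue)s`, built from `"^" + value`, and `%(value)s`, built from the raw text. A sketch of how those parameters pair with `ILIKE`, assuming `helper.string_to_sql_like` treats a leading `^` as a prefix anchor (the helper's body is not part of this diff):

```python
# Assumed behavior of helper.string_to_sql_like: a leading "^" anchors a
# prefix match, anything else becomes a contains match. Not confirmed here.
def string_to_sql_like(value: str) -> str:
    if value.startswith("^"):
        return value[1:] + "%"   # ILIKE 'checkout%'  -> prefix match
    return "%" + value + "%"     # ILIKE '%checkout%' -> contains match

value = "checkout"
params = {"value": string_to_sql_like(value),          # '%checkout%'
          "svalue": string_to_sql_like("^" + value)}   # 'checkout%'
assert params == {"value": "%checkout%", "svalue": "checkout%"}
```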

View file

@@ -0,0 +1,76 @@
import schemas
from chalicelib.core import sessions_mobs, sessions as sessions_search, events
from chalicelib.utils import pg_client, helper
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_os,
s.user_browser,
s.user_device,
s.user_device_type,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
s.user_anonymous_id,
s.platform,
s.issue_score,
to_jsonb(s.issue_types) AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """
def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id):
no_platform = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
no_platform = False
break
if no_platform:
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is))
full_args, query_part = sessions_search.search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None,
project_id=project_id, user_id=user_id)
with pg_client.PostgresClient() as cur:
data.order = schemas.SortOrderType.desc
data.sort = 'duration'
# meta_keys = metadata.get(project_id=project_id)
meta_keys = []
main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY {data.sort} {data.order}
LIMIT 1;""", full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
print(main_query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(data.json())
print("--------------------")
raise err
session = cur.fetchone()
if session:
session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
event_type=schemas.EventType.location)
return helper.dict_to_camel_case(session)
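`search_short_session` forces `sort='duration'` descending with `LIMIT 1`, so it returns the single longest matching session together with its mob URLs and location events. A hypothetical caller (the wrapper itself is illustrative, not part of this diff):

```python
# Hypothetical usage; click_maps and the schemas come from the diff above,
# the wrapper function is an assumption for illustration only.
import schemas
from chalicelib.core import click_maps

def get_click_map_session(project_id: int, user_id: int,
                          data: schemas.FlatClickMapSessionsSearch):
    # None if no session matches; otherwise the longest matching session,
    # camel-cased, with domURL/mobsUrl/events attached
    return click_maps.search_short_session(data=data, project_id=project_id,
                                           user_id=user_id)
```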

View file

@@ -0,0 +1,45 @@
from abc import ABC, abstractmethod
import schemas
class BaseCollaboration(ABC):
@classmethod
@abstractmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
pass
@classmethod
@abstractmethod
def say_hello(cls, url):
pass
@classmethod
@abstractmethod
def send_raw(cls, tenant_id, webhook_id, body):
pass
@classmethod
@abstractmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
pass
@classmethod
@abstractmethod
def __share(cls, tenant_id, integration_id, attachments):
pass
@classmethod
@abstractmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
pass
@classmethod
@abstractmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
pass
@classmethod
@abstractmethod
def __get(cls, tenant_id, integration_id=None):
pass
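Each concrete backend (MSTeams below, Slack after it) implements this contract as classmethods only. A minimal sketch of a hypothetical subclass; note that the double-underscore abstract methods are name-mangled to `_BaseCollaboration__share`/`_BaseCollaboration__get`, so a subclass's own `__share`/`__get` never formally satisfy them, which only surfaces on instantiation, and these classes are never instantiated:

```python
# Hypothetical console backend, illustrating the ABC contract only.
from chalicelib.core.collaboration_base import BaseCollaboration

class ConsoleCollab(BaseCollaboration):
    @classmethod
    def add(cls, tenant_id, data):
        print(f"register webhook {data.url} for tenant {tenant_id}")

    @classmethod
    def say_hello(cls, url):
        return True  # a real backend would POST a hello message here

    @classmethod
    def send_raw(cls, tenant_id, webhook_id, body):
        print(body)

    @classmethod
    def send_batch(cls, tenant_id, webhook_id, attachments):
        for a in attachments:
            print(a)

    @classmethod
    def share_session(cls, tenant_id, project_id, session_id, user, comment,
                      integration_id=None):
        print(f"{user} shared session {session_id}: {comment}")

    @classmethod
    def share_error(cls, tenant_id, project_id, error_id, user, comment,
                    integration_id=None):
        print(f"{user} shared error {error_id}: {comment}")
```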

View file

@@ -0,0 +1,190 @@
import json
import requests
from decouple import config
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
class MSTeams(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url,
webhook_type="msteams",
name=data.name)
return None
# https://messagecardplayground.azurewebsites.net
# https://adaptivecards.io/designer/
@classmethod
def say_hello(cls, url):
r = requests.post(
url=url,
json={
"@type": "MessageCard",
"@context": "https://schema.org/extensions",
"summary": "Hello message",
"title": "Welcome to OpenReplay"
})
if r.status_code != 200:
print("MSTeams integration failed")
print(r.text)
return False
return True
@classmethod
def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["msteams integration not found"]}
try:
r = requests.post(
url=integration["endpoint"],
json=body,
timeout=5)
if r.status_code != 200:
print(f"!! issue sending msteams raw; webhookId:{webhook_id} code:{r.status_code}")
print(r.text)
return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending msteams raw webhookId:{webhook_id}")
return None
except Exception as e:
print(f"!! Issue sending msteams raw webhookId:{webhook_id}")
print(str(e))
return None
return {"data": r.text}
@classmethod
def send_batch(cls, tenant_id, webhook_id, attachments):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["msteams integration not found"]}
print(f"====> sending msteams batch notification: {len(attachments)}")
for i in range(0, len(attachments), 100):
print(json.dumps({"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.2",
"body": attachments[i:i + 100]}}
]}))
r = requests.post(
url=integration["endpoint"],
json={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.2",
"body": attachments[i:i + 100]}}
]})
if r.status_code != 200:
print("!!!! something went wrong")
print(r)
print(r.text)
@classmethod
def __share(cls, tenant_id, integration_id, attachement):
integration = cls.__get(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["Microsoft Teams integration not found"]}
r = requests.post(
url=integration["endpoint"],
json={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [attachement]}}
]
})
return r.text
@classmethod
def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
title = f"[{user}](mailto:{user}) has shared the below session!"
link = f"{config('SITE_URL')}/{project_id}/session/{session_id}"
link = f"[{link}]({link})"
args = {"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]}
if comment and len(comment) > 0:
args["columns"][0]["items"].append({
"type": "TextBlock",
"spacing": "small",
"text": comment
})
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
title = f"[{user}](mailto:{user}) has shared the below error!"
link = f"{config('SITE_URL')}/{project_id}/errors/{error_id}"
link = f"[{link}]({link})"
args = {"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{
"width": "stretch",
"items": [
{"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"},
{"type": "TextBlock",
"spacing": "small",
"text": link}
]
}]}
if comment and len(comment) > 0:
args["columns"][0]["items"].append({
"type": "TextBlock",
"spacing": "small",
"text": comment
})
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def __get(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.msteams)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]
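`send_batch` slices the card elements into chunks of at most 100 and wraps each chunk in one Adaptive Card message envelope per POST. That envelope, reduced to a standalone sketch (the webhook URL is a placeholder):

```python
# The message envelope send_batch POSTs to a Teams incoming webhook;
# WEBHOOK_URL is a placeholder and body_elements stands in for one
# <=100-element slice of the attachments list built by alerts.
import requests

WEBHOOK_URL = "https://example.webhook.office.com/..."  # placeholder
body_elements = [{"type": "TextBlock", "text": "Alert fired", "wrap": True}]

envelope = {"type": "message",
            "attachments": [
                {"contentType": "application/vnd.microsoft.card.adaptive",
                 "contentUrl": None,
                 "content": {"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                             "type": "AdaptiveCard",
                             "version": "1.2",
                             "body": body_elements}}]}
r = requests.post(url=WEBHOOK_URL, json=envelope, timeout=5)
```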

View file

@@ -1,19 +1,20 @@
import requests
from decouple import config
from datetime import datetime
import schemas
from chalicelib.core import webhook
from chalicelib.core.collaboration_base import BaseCollaboration
class Slack:
class Slack(BaseCollaboration):
@classmethod
def add_channel(cls, tenant_id, **args):
url = args["url"]
name = args["name"]
if cls.say_hello(url):
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=url,
endpoint=data.url,
webhook_type="slack",
name=name)
name=data.name)
return None
@classmethod
@@ -34,37 +35,6 @@ class Slack:
return False
return True
@classmethod
def send_text_attachments(cls, tenant_id, webhook_id, text, **args):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
if integration is None:
return {"errors": ["slack integration not found"]}
try:
r = requests.post(
url=integration["endpoint"],
json={
"attachments": [
{
"text": text,
"ts": datetime.now().timestamp(),
**args
}
]
},
timeout=5)
if r.status_code != 200:
print(f"!! issue sending slack text attachments; webhookId:{webhook_id} code:{r.status_code}")
print(r.text)
return None
except requests.exceptions.Timeout:
print(f"!! Timeout sending slack text attachments webhookId:{webhook_id}")
return None
except Exception as e:
print(f"!! Issue sending slack text attachments webhookId:{webhook_id}")
print(str(e))
return None
return {"data": r.text}
@classmethod
def send_raw(cls, tenant_id, webhook_id, body):
integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
@@ -105,24 +75,12 @@ class Slack:
print(r.text)
@classmethod
def __share_to_slack(cls, tenant_id, integration_id, fallback, pretext, title, title_link, text):
def __share(cls, tenant_id, integration_id, attachement):
integration = cls.__get(tenant_id=tenant_id, integration_id=integration_id)
if integration is None:
return {"errors": ["slack integration not found"]}
r = requests.post(
url=integration["endpoint"],
json={
"attachments": [
{
"fallback": fallback,
"pretext": pretext,
"title": title,
"title_link": title_link,
"text": text,
"ts": datetime.now().timestamp()
}
]
})
attachement["ts"] = datetime.now().timestamp()
r = requests.post(url=integration["endpoint"], json={"attachments": [attachement]})
return r.text
@classmethod
@@ -132,7 +90,10 @@ class Slack:
"title": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
@@ -141,19 +102,18 @@ class Slack:
"title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}
@classmethod
def has_slack(cls, tenant_id):
integration = cls.__get(tenant_id=tenant_id)
return not (integration is None or len(integration) == 0)
data = cls.__share(tenant_id, integration_id, attachement=args)
if "errors" in data:
return data
return {"data": data}
@classmethod
def __get(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get(tenant_id=tenant_id, webhook_id=integration_id)
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.slack)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type="slack")
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]

View file

@@ -257,6 +257,7 @@ COUNTRIES = {
"UA": "Ukraine",
"UG": "Uganda",
"UM": "United States Minor Outlying Islands",
"UN": "United Nations",
"US": "United States",
"UY": "Uruguay",
"UZ": "Uzbekistan",

View file

@@ -1,15 +1,18 @@
import json
from typing import Union
from fastapi import HTTPException
from starlette import status
import schemas
from chalicelib.core import sessions, funnels, errors, issues
from chalicelib.core import sessions, funnels, errors, issues, metrics, click_maps
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
PIE_CHART_GROUP = 5
def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __try_live(project_id, data: schemas.CreateCardSchema):
results = []
for i, s in enumerate(data.series):
s.filter.startDate = data.startTimestamp
@@ -42,11 +45,11 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return results
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
def __is_funnel_chart(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.funnel
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __get_funnel_chart(project_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
return {
"stages": [],
@@ -57,12 +60,12 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __is_errors_list(data):
def __is_errors_list(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.errors
and data.metric_of == schemas.MetricOfTable.errors
def __get_errors_list(project_id, user_id, data):
def __get_errors_list(project_id, user_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
return {
"total": 0,
@@ -75,12 +78,12 @@ def __get_errors_list(project_id, user_id, data):
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_sessions_list(data):
def __is_sessions_list(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.TableMetricOfType.sessions
and data.metric_of == schemas.MetricOfTable.sessions
def __get_sessions_list(project_id, user_id, data):
def __get_sessions_list(project_id, user_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
print("empty series")
return {
@@ -94,14 +97,36 @@ def __get_sessions_list(project_id, user_id, data):
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
if __is_funnel_chart(data):
def __is_predefined(data: schemas.CreateCardSchema):
return data.is_template
def __is_click_map(data: schemas.CreateCardSchema):
return data.metric_type == schemas.MetricType.click_map
def __get_click_map_chart(project_id, user_id, data: schemas.CreateCardSchema):
if len(data.series) == 0:
return None
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
return click_maps.search_short_session(project_id=project_id, user_id=user_id,
data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()))
def merged_live(project_id, data: schemas.CreateCardSchema, user_id=None):
if data.is_template:
return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict())
elif __is_funnel_chart(data):
return __get_funnel_chart(project_id=project_id, data=data)
elif __is_errors_list(data):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
elif __is_sessions_list(data):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
elif __is_click_map(data):
return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
elif len(data.series) == 0:
return []
series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
@@ -113,12 +138,12 @@ def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id
return results
def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
schemas.CustomMetricSessionsPayloadSchema]) \
-> Union[schemas.CreateCustomMetricsSchema, None]:
def __merge_metric_with_data(metric: schemas.CreateCardSchema,
data: schemas.CardChartSchema) -> schemas.CreateCardSchema:
if data.series is not None and len(data.series) > 0:
metric["series"] = data.series
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
metric.series = data.series
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(
**{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
if len(data.filters) > 0 or len(data.events) > 0:
for s in metric.series:
if len(data.filters) > 0:
@@ -128,35 +153,22 @@ def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloa
return metric
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema, metric: schemas.CreateCardSchema = None):
if metric is None:
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
return merged_live(project_id=project_id, data=metric, user_id=user_id)
# if __is_funnel_chart(metric):
# return __get_funnel_chart(project_id=project_id, data=metric)
# elif __is_errors_list(metric):
# return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
#
# series_charts = __try_live(project_id=project_id, data=metric)
# if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
# return series_charts
# results = [{}] * len(series_charts[0])
# for i in range(len(results)):
# for j, series_chart in enumerate(series_charts):
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
# metric.series[j].name: series_chart[i]["count"]}
# return results
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
results = []
@@ -171,11 +183,12 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@@ -187,11 +200,12 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if raw_metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@@ -203,7 +217,7 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSe
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if data.series is None:
return results
@@ -218,7 +232,7 @@ def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadS
return results
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
def create(project_id, user_id, data: schemas.CreateCardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
_data = {}
for i, s in enumerate(data.series):
@@ -227,35 +241,35 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
data.series = None
params = {"user_id": user_id, "project_id": project_id,
"default_config": json.dumps(data.config.dict()),
**data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;""", params)
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
params["default_config"] = json.dumps(data.default_config.dict())
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s, %(thumbnail)s)
RETURNING metric_id"""
if len(data.series) > 0:
query = f"""WITH m AS ({query})
INSERT INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;"""
cur.execute(
query
)
query = cur.mogrify(query, params)
# print("-------")
# print(query)
# print("-------")
cur.execute(query)
r = cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema):
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
series_ids = [r["seriesId"] for r in metric["series"]]
@@ -267,7 +281,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of,
"metric_value": data.metric_value, "metric_format": data.metric_format,
"config": json.dumps(data.config.dict())}
"config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail}
for i, s in enumerate(data.series):
prefix = "u_"
if s.index is None:
@@ -318,16 +332,42 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now()),
default_config = %(config)s
default_config = %(config)s,
thumbnail = %(thumbnail)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(query)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_all(project_id, user_id, include_series=False):
# def __presign_thumbnail(card):
# if card["thumbnail_url"]:
# card["thumbnail_url"] = s3.client.generate_presigned_url(
# 'get_object',
# Params={'Bucket': config('THUMBNAILS_BUCKET'), 'Key': card["thumbnail_url"]},
# ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900)
# )
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"]
params = {"project_id": project_id, "user_id": user_id,
"offset": (data.page - 1) * data.limit,
"limit": data.limit, }
if data.mine_only:
constraints.append("user_id = %(user_id)s")
else:
constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
if data.shared_only:
constraints.append("is_public")
if data.query is not None and len(data.query) > 0:
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
sub_join = ""
if include_series:
@@ -336,45 +376,55 @@ def get_all(project_id, user_id, include_series=False):
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (user_id = %(user_id)s OR metrics.is_public)
ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
{"project_id": project_id, "user_id": user_id}
)
)
query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
dashboards, owner_email, default_config AS config, thumbnail
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()
if include_series:
for r in rows:
# r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
# __presign_thumbnail(r)
for s in r["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
else:
for r in rows:
# __presign_thumbnail(r)
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
rows = helper.list_to_camel_case(rows)
return rows
def get_all(project_id, user_id):
default_search = schemas.SearchCardsSchema()
result = rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
while len(rows) == default_search.limit:
default_search.page += 1
rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
result += rows
return result
def delete(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
@@ -390,37 +440,37 @@ def delete(project_id, metric_id, user_id):
return {"state": "success"}
def get(metric_id, project_id, user_id, flatten=True):
def get_card(metric_id, project_id, user_id, flatten=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
query = cur.mogrify(
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))
AND metric_id = %(metric_id)s) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
if row is None:
return None
@@ -443,9 +493,8 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *, default_config AS config
query = cur.mogrify(
f"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
@@ -458,9 +507,9 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
@@ -499,17 +548,17 @@ def change_state(project_id, metric_id, user_id, status):
AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "status": status, "user_id": user_id})
)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CustomMetricSessionsPayloadSchema
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@@ -538,3 +587,91 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
raw_metric = get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
include_dashboard=False)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
else:
return make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data, metric=metric)
PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
# def add_thumbnail(metric_id, user_id, project_id):
# key = generate_file_key(project_id=project_id, key=f"{metric_id}.png")
# params = {"metric_id": metric_id, "user_id": user_id, "project_id": project_id, "key": key}
# with pg_client.PostgresClient() as cur:
# query = cur.mogrify(f"""\
# UPDATE metrics
# SET thumbnail_url = %(key)s
# WHERE metric_id = %(metric_id)s
# AND project_id = %(project_id)s
# AND (user_id = %(user_id)s OR is_public)
# RETURNING metric_id;""", params)
# cur.execute(query)
# row = cur.fetchone()
# if row is None:
# return {"errors": ["Card not found"]}
# return {"data": s3.get_presigned_url_for_upload(bucket=config('THUMBNAILS_BUCKET'), expires_in=180, key=key,
# # content-length-range is in bytes
# conditions=["content-length-range", 1, 1 * 1024 * 1024],
# content_type="image/png")}
#
#
# def generate_file_key(project_id, key):
# return f"{project_id}/cards/{key}"

View file

@@ -1,48 +1,11 @@
import json
import schemas
from chalicelib.core import custom_metrics, metrics
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
# category name should be lower cased
CATEGORY_DESCRIPTION = {
'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
'custom': 'Previously created custom metrics by me and my team.',
'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
'performance': 'Optimize your apps performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
'resources': 'Find out which resources are missing and those that may be slowing your web app.'
}
def get_templates(project_id, user_id):
with pg_client.PostgresClient() as cur:
pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
FROM (SELECT * , default_config AS config
FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE deleted_at IS NULL
AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
) AS metrics
GROUP BY category
ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
{"project_id": project_id, "userId": user_id})
cur.execute(pg_query)
rows = cur.fetchall()
for r in rows:
r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
for w in r["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.list_to_camel_case(rows)
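For context, helper.list_to_camel_case is assumed here to rename snake_case row keys to camelCase before the rows go back to the UI; a rough sketch of that behavior (not the repo's actual implementation):

def to_camel(s: str) -> str:
    head, *tail = s.split("_")
    return head + "".join(p.title() for p in tail)


def list_to_camel_case(rows):
    # rename keys only; values pass through untouched
    return [{to_camel(k): v for k, v in r.items()} for r in rows]


print(list_to_camel_case([{"created_at": 1674200000000, "is_public": True}]))
# [{'createdAt': 1674200000000, 'isPublic': True}]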
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur:
@ -243,86 +206,18 @@ def pin_dashboard(project_id, user_id, dashboard_id):
return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCardSchema):
metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
}
def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
include_dashboard=False)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template and metric.predefined_key is None:
return None
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
metric=raw_metric)
def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
data=data, metric=raw_metric)
# def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CardChartSchema):
# raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
# if raw_metric is None:
# return None
# metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate(**raw_metric)
# if metric.is_template:
# return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
# else:
# return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
# data=data, metric=raw_metric)

View file

@ -2,6 +2,7 @@ import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@ -277,7 +278,7 @@ def get_details(project_id, error_id, user_id, **data):
status = cur.fetchone()
if status is not None:
row["stack"] = format_first_stack_frame(status).pop("stack")
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
row["status"] = status.pop("status")
row["parent_error_id"] = status.pop("parent_error_id")
row["favorite"] = status.pop("favorite")
@ -721,19 +722,6 @@ def __status_rank(status):
}.get(status)
def format_first_stack_frame(error):
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
for s in error["stack"]:
for c in s.get("context", []):
for sci, sc in enumerate(c):
if isinstance(sc, str) and len(sc) > 1000:
c[sci] = sc[:1000]
# convert bytes to string:
if isinstance(s["filename"], bytes):
s["filename"] = s["filename"].decode("utf-8")
return error
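The removed format_first_stack_frame (now in errors_helper) mostly caps oversized context strings and normalizes byte filenames. A self-contained sketch of that clean-up step, with a hypothetical frame shape:

def clean_frame(frame: dict) -> dict:
    # truncate long context strings to 1000 chars to bound payload size
    for context in frame.get("context", []):
        for i, chunk in enumerate(context):
            if isinstance(chunk, str) and len(chunk) > 1000:
                context[i] = chunk[:1000]
    # convert bytes filenames to str
    if isinstance(frame.get("filename"), bytes):
        frame["filename"] = frame["filename"].decode("utf-8")
    return frame


print(clean_frame({"filename": b"app.js", "context": [["x" * 1500]]})["filename"])  # app.js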
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(

View file

@ -1,16 +1,15 @@
import schemas
from chalicelib.core import issues
from chalicelib.core import metadata
from chalicelib.core import sessions_metas
from typing import Optional
import schemas
from chalicelib.core import autocomplete
from chalicelib.core import issues
from chalicelib.core import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
from chalicelib.core import autocomplete
def get_customs_by_sessionId2_pg(session_id, project_id):
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
@ -40,7 +39,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):
for c in click_rage_issues:
merge_count = c.get("payload")
if merge_count is not None:
merge_count = merge_count.get("count", 3)
merge_count = merge_count.get("Count", 3)
else:
merge_count = 3
for i in range(len(rows)):
@ -53,246 +52,53 @@ def __get_grouped_clickrage(rows, session_id, project_id):
return rows
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = []
if event_type is None or event_type == schemas.EventType.click:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input:
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.location:
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows
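Note the final merge step above: the per-table result sets are concatenated, then ordered by (timestamp, messageId) so simultaneous events keep a stable order. A tiny sketch with made-up rows:

clicks = [{"type": "CLICK", "timestamp": 120, "messageId": 7}]
inputs = [{"type": "INPUT", "timestamp": 120, "messageId": 3}]
pages = [{"type": "LOCATION", "timestamp": 90, "messageId": 1}]

# same ordering key as the code above
rows = sorted(clicks + inputs + pages, key=lambda k: (k["timestamp"], k["messageId"]))
print([r["type"] for r in rows])  # ['LOCATION', 'INPUT', 'CLICK']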
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
def __search_pg_errors(project_id, value, key=None, source=None):
now = TimeUTC.now()
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source,
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
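The query above UNIONs several LIMIT-ed subqueries: prefix matches (svalue) always run, while substring matches (value) are added only for inputs longer than 2 characters. A standalone sketch of that query shape, with illustrative table and column names:

def build_autocomplete_sql(search_text: str) -> str:
    parts = [
        "(SELECT DISTINCT ON (message) message AS value FROM errors"
        " WHERE message ILIKE %(svalue)s LIMIT 5)",
        "(SELECT DISTINCT ON (name) name AS value FROM errors"
        " WHERE name ILIKE %(svalue)s LIMIT 5)",
    ]
    if len(search_text) > 2:  # substring matching is costlier, so it is gated
        parts += [
            "(SELECT DISTINCT ON (message) message AS value FROM errors"
            " WHERE message ILIKE %(value)s LIMIT 5)",
            "(SELECT DISTINCT ON (name) name AS value FROM errors"
            " WHERE name ILIKE %(value)s LIMIT 5)",
        ]
    return " UNION DISTINCT ".join(parts) + ";"


print(build_autocomplete_sql("err"))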
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
def __search_pg_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
return []
sub_from = []
if key is not None:
meta_keys = {key: meta_keys[key]}
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
if len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
class event_type:
class EventType:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
@ -314,46 +120,46 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.LOCATION.ui_type)),
event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.REQUEST.ui_type)),
event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
query=autocomplete.__generic_query(
typename=event_type.GRAPHQL.ui_type)),
event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.STATEACTION.ui_type)),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
query=autocomplete.__generic_query(
typename=event_type.CLICK_IOS.ui_type)),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
query=autocomplete.__generic_query(
typename=event_type.INPUT_IOS.ui_type)),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
query=autocomplete.__generic_query(
typename=event_type.VIEW_IOS.ui_type)),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
typename=EventType.LOCATION.ui_type)),
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
query=autocomplete.__generic_query(
typename=EventType.REQUEST.ui_type)),
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
query=autocomplete.__generic_query(
typename=EventType.GRAPHQL.ui_type)),
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.CUSTOM_IOS.ui_type)),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=event_type.REQUEST_IOS.ui_type)),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None),
typename=EventType.STATEACTION.ui_type)),
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
query=None),
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
query=None),
# IOS
EventType.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_IOS),
query=autocomplete.__generic_query(
typename=EventType.CLICK_IOS.ui_type)),
EventType.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_IOS),
query=autocomplete.__generic_query(
typename=EventType.INPUT_IOS.ui_type)),
EventType.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_IOS),
query=autocomplete.__generic_query(
typename=EventType.VIEW_IOS.ui_type)),
EventType.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_IOS),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM_IOS.ui_type)),
EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=EventType.REQUEST_IOS.ui_type)),
EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_errors_ios,
query=None),
}
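SUPPORTED_TYPES is a registry that maps each event type to a getter (autocomplete) and an optional query builder, and search() dispatches on the type string. A minimal standalone version of the pattern; the types and handlers here are hypothetical:

from typing import Callable, NamedTuple, Optional


class SupportedFilter(NamedTuple):
    get: Callable[..., list]
    query: Optional[Callable[..., str]]


def click_autocomplete(project_id: int, value: str, **kwargs) -> list:
    # stand-in for autocomplete.__generic_autocomplete(EventType.CLICK)
    return [{"type": "CLICK", "value": value}]


REGISTRY = {"CLICK": SupportedFilter(get=click_autocomplete, query=None)}


def search(text: str, event_type: str, project_id: int) -> list:
    if event_type in REGISTRY:
        return REGISTRY[event_type].get(project_id=project_id, value=text)
    return []


print(search("buy", "CLICK", project_id=1))  # [{'type': 'CLICK', 'value': 'buy'}]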
@ -361,7 +167,7 @@ def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
@ -378,11 +184,9 @@ def search(text, event_type, project_id, source, key):
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \

View file

@ -7,8 +7,8 @@ def get_customs_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""\
SELECT
c.*,
'{events.event_type.CUSTOM_IOS.ui_type}' AS type
FROM {events.event_type.CUSTOM_IOS.table} AS c
'{events.EventType.CUSTOM_IOS.ui_type}' AS type
FROM {events.EventType.CUSTOM_IOS.table} AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
@ -23,8 +23,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
c.*,
'{events.event_type.CLICK_IOS.ui_type}' AS type
FROM {events.event_type.CLICK_IOS.table} AS c
'{events.EventType.CLICK_IOS.ui_type}' AS type
FROM {events.EventType.CLICK_IOS.table} AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
@ -35,8 +35,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
i.*,
'{events.event_type.INPUT_IOS.ui_type}' AS type
FROM {events.event_type.INPUT_IOS.table} AS i
'{events.EventType.INPUT_IOS.ui_type}' AS type
FROM {events.EventType.INPUT_IOS.table} AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
@ -46,8 +46,8 @@ def get_by_sessionId(session_id, project_id):
cur.execute(cur.mogrify(f"""
SELECT
v.*,
'{events.event_type.VIEW_IOS.ui_type}' AS type
FROM {events.event_type.VIEW_IOS.table} AS v
'{events.EventType.VIEW_IOS.ui_type}' AS type
FROM {events.EventType.VIEW_IOS.table} AS v
WHERE
v.session_id = %(session_id)s
ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
@ -61,7 +61,7 @@ def get_crashes_by_session_id(session_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""
SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
FROM {events.event_type.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id)
FROM {events.EventType.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE
cr.session_id = %(session_id)s
ORDER BY timestamp;""", {"session_id": session_id}))

View file

@ -1,11 +1,10 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
from chalicelib.core import significance, sessions
from chalicelib.utils import dev
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
@ -39,7 +38,7 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
is_any = sh.isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
@ -156,7 +155,7 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
def get_possible_issue_types(project_id):
return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
return [{"type": t, "title": helper.get_issue_title(t)} for t in
['click_rage', 'dead_click', 'excessive_scrolling',
'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
@ -193,7 +192,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), project_id=project_id,
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
project_id=project_id,
user_id=user_id)
@ -252,7 +252,7 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
@ -301,7 +301,7 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:

View file

@ -1,26 +1,74 @@
from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def get_by_url(project_id, data):
args = {"startDate": data.get('startDate', TimeUTC.now(delta_days=-30)),
"endDate": data.get('endDate', TimeUTC.now()),
"project_id": project_id, "url": data["url"]}
def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
args = {"startDate": data.startDate, "endDate": data.endDate,
"project_id": project_id, "url": data.url}
constraints = ["sessions.project_id = %(project_id)s",
"(url = %(url)s OR path= %(url)s)",
"clicks.timestamp >= %(startDate)s",
"clicks.timestamp <= %(endDate)s",
"start_ts >= %(startDate)s",
"start_ts <= %(endDate)s",
"duration IS NOT NULL"]
query_from = "events.clicks INNER JOIN sessions USING (session_id)"
q_count = "count(1) AS count"
has_click_rage_filter = False
if len(data.filters) > 0:
for i, f in enumerate(data.filters):
if f.type == schemas.FilterType.issue and len(f.value) > 0:
has_click_rage_filter = True
q_count = "max(real_count) AS count,TRUE AS click_rage"
query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
INNER JOIN issues AS mis USING (issue_id)
INNER JOIN LATERAL (
SELECT COUNT(1) AS real_count
FROM events.clicks AS sc
INNER JOIN sessions as ss USING (session_id)
WHERE ss.project_id = %(project_id)s
AND (sc.url = %(url)s OR sc.path = %(url)s)
AND sc.timestamp >= %(startDate)s
AND sc.timestamp <= %(endDate)s
AND ss.start_ts >= %(startDate)s
AND ss.start_ts <= %(endDate)s
AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
constraints += ["mis.project_id = %(project_id)s",
"issues.timestamp >= %(startDate)s",
"issues.timestamp <= %(endDate)s"]
f_k = f"issue_value{i}"
args = {**args, **sh.multi_values(f.value, value_key=f_k)}
constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
f.value, value_key=f_k))
constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
f.value, value_key=f_k))
if len(f.filters) > 0:
for j, sf in enumerate(f.filters):
f_k = f"issue_svalue{i}{j}"
args = {**args, **sh.multi_values(sf.value, value_key=f_k)}
if sf.type == schemas.IssueFilterType._on_selector and len(sf.value) > 0:
constraints.append(sh.multi_conditions(f"clicks.selector = %({f_k})s",
sf.value, value_key=f_k))
if data.click_rage and not has_click_rage_filter:
constraints.append("""(issues.session_id IS NULL
OR (issues.timestamp >= %(startDate)s
AND issues.timestamp <= %(endDate)s
AND mis.project_id = %(project_id)s
AND mis.type = 'click_rage'))""")
q_count += ",COALESCE(bool_or(mis.issue_id IS NOT NULL), FALSE) AS click_rage"
query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
LEFT JOIN issues AS mis USING (issue_id)"""
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT selector, count(1) AS count
FROM events.clicks
INNER JOIN sessions USING (session_id)
WHERE project_id = %(project_id)s
AND url = %(url)s
AND timestamp >= %(startDate)s
AND timestamp <= %(endDate)s
AND start_ts >= %(startDate)s
AND start_ts <= %(endDate)s
AND duration IS NOT NULL
GROUP BY selector;""",
args)
query = cur.mogrify(f"""SELECT selector, {q_count}
FROM {query_from}
WHERE {" AND ".join(constraints)}
GROUP BY selector
LIMIT 500;""", args)
# print("---------")
# print(query.decode('UTF-8'))
# print("---------")
try:
cur.execute(query)
except Exception as err:
@ -31,4 +79,4 @@ def get_by_url(project_id, data):
print("--------------------")
raise err
rows = cur.fetchall()
return helper.dict_to_camel_case(rows)
return helper.list_to_camel_case(rows)
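The rewritten get_by_url builds its SQL incrementally: each filter appends JOINs to query_from and predicates to constraints, and the pieces are only stitched together at mogrify time. A compact sketch of that assembly style (illustrative names, no database access):

def build_heatmap_query(click_rage: bool) -> str:
    query_from = "events.clicks INNER JOIN sessions USING (session_id)"
    constraints = [
        "sessions.project_id = %(project_id)s",
        "(url = %(url)s OR path = %(url)s)",
        "duration IS NOT NULL",
    ]
    q_count = "count(1) AS count"
    if click_rage:
        # widen the FROM clause and the projection together
        q_count += ",COALESCE(bool_or(mis.issue_id IS NOT NULL), FALSE) AS click_rage"
        query_from += (" LEFT JOIN events_common.issues USING (timestamp, session_id)"
                       " LEFT JOIN issues AS mis USING (issue_id)")
        constraints.append("(issues.session_id IS NULL OR mis.type = 'click_rage')")
    return (f"SELECT selector, {q_count}\n"
            f"FROM {query_from}\n"
            f"WHERE {' AND '.join(constraints)}\n"
            "GROUP BY selector\n"
            "LIMIT 500;")


print(build_heatmap_query(click_rage=True))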

View file

@ -6,7 +6,7 @@ from chalicelib.utils import pg_client
MAX_INDEXES = 10
def _get_column_names():
def column_names():
return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]
@ -16,7 +16,7 @@ def get(project_id):
cur.mogrify(
f"""\
SELECT
{",".join(_get_column_names())}
{",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
@ -38,7 +38,7 @@ def get_batch(project_ids):
cur.mogrify(
f"""\
SELECT
project_id, {",".join(_get_column_names())}
project_id, {",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", {"project_ids": tuple(project_ids)})
@ -140,7 +140,7 @@ def add(tenant_id, project_id, new_name):
def search(tenant_id, project_id, key, value):
value = value + "%"
s_query = []
for f in _get_column_names():
for f in column_names():
s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")
with pg_client.PostgresClient() as cur:
@ -215,7 +215,7 @@ def get_keys_by_projects(project_ids):
f"""\
SELECT
project_id,
{",".join(_get_column_names())}
{",".join(column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s AND deleted_at ISNULL;""",
{"project_ids": tuple(project_ids)})

View file

@ -127,7 +127,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.project_id =%(project_id)s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_id": project_id})
@ -148,7 +148,7 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
FROM public.projects AS s
where s.project_key =%(project_key)s
WHERE s.project_key =%(project_key)s
AND s.deleted_at IS NULL
LIMIT 1;""",
{"project_key": project_key})
@ -201,7 +201,7 @@ def count_by_tenant(tenant_id):
SELECT
count(s.project_id)
FROM public.projects AS s
where s.deleted_at IS NULL;""")
WHERE s.deleted_at IS NULL;""")
return cur.fetchone()["count"]
@ -212,7 +212,7 @@ def get_gdpr(project_id):
SELECT
gdpr
FROM public.projects AS s
where s.project_id =%(project_id)s
WHERE s.project_id =%(project_id)s
AND s.deleted_at IS NULL;""",
{"project_id": project_id})
)
@ -241,7 +241,7 @@ def get_internal_project_id(project_key):
cur.mogrify("""\
SELECT project_id
FROM public.projects
where project_key =%(project_key)s AND deleted_at ISNULL;""",
WHERE project_key =%(project_key)s AND deleted_at ISNULL;""",
{"project_key": project_key})
)
row = cur.fetchone()
@ -254,7 +254,7 @@ def get_project_key(project_id):
cur.mogrify("""\
SELECT project_key
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
project = cur.fetchone()
@ -268,7 +268,7 @@ def get_capture_status(project_id):
SELECT
sample_rate AS rate, sample_rate=100 AS capture_all
FROM public.projects
where project_id =%(project_id)s AND deleted_at ISNULL;""",
WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
{"project_id": project_id})
)
return helper.dict_to_camel_case(cur.fetchone())

View file

@ -2,9 +2,11 @@ from typing import List
import schemas
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \
sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
sessions_devtool, sessions_notes
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper, metrics_helper
from chalicelib.utils import sql_helper as sh
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
@ -60,7 +62,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
s.session_id::text AS session_id,
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
WHERE s.project_id = %(project_id)s
AND s.session_id = %(session_id)s;""",
@ -84,16 +86,16 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id)
@ -114,67 +116,6 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
return None
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def __reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def __get_sql_operator_multiple(op: schemas.SearchEventOperator):
return " IN " if op not in [schemas.SearchEventOperator._is_not, schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains] else " NOT IN "
def __get_sql_value_multiple(values):
if isinstance(values, tuple):
return values
return tuple(values) if isinstance(values, list) else (values,)
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
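These removed helpers (relocated as sh.multi_values / sh.multi_conditions) expand one filter with N values into N bind parameters plus an OR-joined condition (AND-joined for negated operators). A self-contained sketch:

def multi_conditions(condition: str, values: list, value_key: str = "value",
                     is_not: bool = False) -> str:
    # one copy of the condition per value, each with a unique placeholder
    parts = [condition.replace(value_key, f"{value_key}_{i}") for i in range(len(values))]
    return "(" + (" AND " if is_not else " OR ").join(parts) + ")"


def multi_values(values: list, value_key: str = "value") -> dict:
    # bind-parameter dict matching the placeholders generated above
    return {f"{value_key}_{i}": v for i, v in enumerate(values or [])}


print(multi_conditions("s.user_browser = %(f_value)s", ["Chrome", "Firefox"],
                       value_key="f_value"))
# (s.user_browser = %(f_value_0)s OR s.user_browser = %(f_value_1)s)
print(multi_values(["Chrome", "Firefox"], "f_value"))
# {'f_value_0': 'Chrome', 'f_value_1': 'Firefox'}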
# This function executes the query and returns the result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
@ -261,9 +202,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
print("--------------------")
print(main_query)
print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
@ -304,13 +245,13 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.TableMetricOfType, metric_value: List):
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.TableMetricOfType.visited_url:
if metric_of == schemas.MetricOfTable.visited_url:
extra_event = "events.pages"
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
@ -353,18 +294,18 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
else:
sessions = cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table:
if isinstance(metric_of, schemas.TableMetricOfType):
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = ""
extra_where = ""
pre_query = ""
if metric_of == schemas.TableMetricOfType.user_country:
if metric_of == schemas.MetricOfTable.user_country:
main_col = "user_country"
elif metric_of == schemas.TableMetricOfType.user_device:
elif metric_of == schemas.MetricOfTable.user_device:
main_col = "user_device"
elif metric_of == schemas.TableMetricOfType.user_browser:
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
elif metric_of == schemas.TableMetricOfType.issues:
elif metric_of == schemas.MetricOfTable.issues:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -374,7 +315,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
elif metric_of == schemas.MetricOfTable.visited_url:
main_col = "path"
extra_col = ", path"
main_query = cur.mogrify(f"""{pre_query}
@ -420,7 +361,8 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
# this function generates the query and returns it along with the dict of query arguments
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
"projectId": project_id, "userId": user_id}
@ -438,15 +380,15 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
if __is_negation_operator(f.operator):
if sh.is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if is_any:
@ -454,9 +396,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_browser IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
if is_any:
@ -464,9 +407,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_os IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
if is_any:
@ -474,9 +417,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_device IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
if is_any:
@ -484,9 +427,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_country IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
if is_any:
@ -497,11 +441,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_source IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
if is_any:
extra_constraints.append('s.utm_medium IS NOT NULL')
@ -511,11 +455,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_medium IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
if is_any:
extra_constraints.append('s.utm_campaign IS NOT NULL')
@ -525,11 +469,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_campaign IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
@ -546,8 +490,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_constraints.append('s.base_referrer IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -561,11 +506,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
extra_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
@ -577,9 +522,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
@ -590,11 +537,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_anonymous_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
extra_constraints.append('s.rev_id IS NOT NULL')
@ -604,40 +551,58 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.rev_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
# op = __get_sql_operator(f.operator)
# op = sh.get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
if is_any:
extra_constraints.append("array_length(s.issue_types, 1) > 0")
ss_constraints.append("array_length(ms.issue_types, 1) > 0")
else:
extra_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
# search sessions with click_rage on a specific selector
if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value:
for j, sf in enumerate(f.filters):
if sf.operator == schemas.IssueFilterOperator._on_selector:
f_k = f"f_value{i}_{j}"
full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)}
extra_constraints += ["mc.timestamp>=%(startDate)s",
"mc.timestamp<=%(endDate)s",
"mis.type='click_rage'",
sh.multi_conditions(f"mc.selector=%({f_k})s",
sf.value, is_not=is_not,
value_key=f_k)]
extra_from += """INNER JOIN events.clicks AS mc USING(session_id)
INNER JOIN events_common.issues USING (session_id,timestamp)
INNER JOIN public.issues AS mis USING (issue_id)\n"""
elif filter_type == schemas.FilterType.events_count:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
@ -649,16 +614,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
events_joiner = " UNION " if or_events else " INNER JOIN LATERAL "
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
op = sh.get_sql_operator(event.operator)
is_not = False
if __is_negation_operator(event.operator):
if sh.is_negation_operator(event.operator):
is_not = True
op = __reverse_sql_operator(op)
op = sh.reverse_sql_operator(op)
if event_index == 0 or or_events:
event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)"
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
@ -678,116 +643,120 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if event.type != schemas.PerformanceEventType.time_between_events:
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.event_type.CLICK.ui_type:
event_from = event_from % f"{events.event_type.CLICK.table} AS main "
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
if not is_any:
if event.operator == schemas.ClickEventExtraOperator._on_selector:
event_where.append(
sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
else:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.INPUT.ui_type:
event_from = event_from % f"{events.EventType.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.INPUT.ui_type:
event_from = event_from % f"{events.event_type.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
elif event_type == events.event_type.LOCATION.ui_type:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
elif event_type == events.EventType.LOCATION.ui_type:
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM.table} AS main "
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.REQUEST.ui_type:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
# elif event_type == events.event_type.GRAPHQL.ui_type:
# event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
# value_key=e_k))
elif event_type == events.event_type.STATEACTION.ui_type:
event_from = event_from % f"{events.event_type.STATEACTION.table} AS main "
elif event_type == events.EventType.STATEACTION.ui_type:
event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR.ui_type:
event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR.ui_type:
event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
event.source = list(set(event.source))
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
# ----- IOS
elif event_type == events.event_type.CLICK_IOS.ui_type:
event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main "
elif event_type == events.EventType.CLICK_IOS.ui_type:
event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.INPUT_IOS.ui_type:
event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main "
elif event_type == events.EventType.INPUT_IOS.ui_type:
event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, f"custom{i}")}
elif event_type == events.event_type.VIEW_IOS.ui_type:
event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main "
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
elif event_type == events.EventType.VIEW_IOS.ui_type:
event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR_IOS.ui_type:
event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR_IOS.ui_type:
event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
elif event_type == schemas.PerformanceEventType.fetch_failed:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
col = performance_event.get_col(event_type)
colname = col["column"]
event_where.append(f"main.{colname} = FALSE")
@ -801,7 +770,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# colname = col["column"]
# tname = "main"
# e_k += "_custom"
# full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
# full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
# event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
# _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
@ -811,7 +780,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
]:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
col = performance_event.get_col(event_type)
colname = col["column"]
tname = "main"
@ -822,16 +791,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
f"{tname}.timestamp <= %(endDate)s"]
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) "
event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
if not isinstance(event.value[0].value, list):
event.value[0].value = [event.value[0].value]
if not isinstance(event.value[1].value, list):
@ -843,98 +812,99 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
e_k1 = e_k + "_e1"
e_k2 = e_k + "_e2"
full_args = {**full_args,
**_multiple_values(event.value[0].value, value_key=e_k1),
**_multiple_values(event.value[1].value, value_key=e_k2)}
s_op = __get_sql_operator(event.value[0].operator)
**sh.multi_values(event.value[0].value, value_key=e_k1),
**sh.multi_values(event.value[1].value, value_key=e_k2)}
s_op = sh.get_sql_operator(event.value[0].operator)
event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"]
if event_index > 0 and not or_events:
event_where.append("main2.session_id=event_0.session_id")
is_any = _isAny_opreator(event.value[0].operator)
is_any = sh.isAny_opreator(event.value[0].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s",
sh.multi_conditions(
f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s",
event.value[0].value, value_key=e_k1))
s_op = __get_sql_operator(event.value[1].operator)
is_any = _isAny_opreator(event.value[1].operator)
s_op = sh.get_sql_operator(event.value[1].operator)
is_any = sh.isAny_opreator(event.value[1].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s",
sh.multi_conditions(
f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s",
event.value[1].value, value_key=e_k2))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(
_multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
apply = False
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._status_code:
event_where.append(
_multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._duration:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
else:
print(f"undefined FETCH filter: {f.type}")
if not apply:
continue
elif event_type == schemas.EventType.graphql:
event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
event_where.append(
_multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
else:
print(f"undefined GRAPHQL filter: {f.type}")
else:
@ -1005,7 +975,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
extra_constraints.append("ser.project_id = %(project_id)s")
# if error_status != schemas.ErrorStatus.all:
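
For orientation, a minimal sketch (not part of the diff) of what the renamed helpers produce for a negated user-id filter; the operator and values are illustrative:

from chalicelib.utils import sql_helper as sh  # module introduced later in this diff

op = sh.get_sql_operator(schemas.SearchEventOperator._is_not)          # "!="
is_not = sh.is_negation_operator(schemas.SearchEventOperator._is_not)  # True
cond = sh.multi_conditions(f"s.user_id {op} %(f_k)s::text",
                           ["alice", "bob"], is_not=is_not, value_key="f_k")
# -> "(s.user_id != %(f_k_0)s::text AND s.user_id != %(f_k_1)s::text)"
params = sh.multi_values(["alice", "bob"], value_key="f_k")
# -> {"f_k_0": "alice", "f_k_1": "bob"}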

View file

@ -3,9 +3,10 @@ from urllib.parse import urljoin
from decouple import config
import schemas
from chalicelib.core import sessions
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
@ -57,8 +58,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
if data.tags and len(data.tags) > 0:
k = "tag_value"
conditions.append(
sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sessions._multiple_values(data.tags, value_key=k)
sh.multi_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sh.multi_values(data.tags, value_key=k)
if data.shared_only:
conditions.append("sessions_notes.is_public")
elif data.mine_only:
@ -166,3 +167,60 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
webhook_id=webhook_id,
body={"blocks": blocks}
)
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"[Note for session {note['sessionId']}]({session_url})"
blocks = [{
"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"
},
{
"type": "TextBlock",
"spacing": "Small",
"text": note["message"]
}
]
if note["tag"]:
blocks.append({"type": "TextBlock",
"spacing": "Small",
"text": f"Tag: *{note['tag']}*",
"size": "Small"})
bottom = f"Created by {note['creatorName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "TextBlock",
"spacing": "Default",
"text": bottom,
"size": "Small",
"fontType": "Monospace"})
return MSTeams.send_raw(
tenant_id=tenant_id,
webhook_id=webhook_id,
body={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [{
"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{"width": "stretch",
"items": blocks,
"type": "Column"}]
}]}}
]})
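
A hedged usage sketch, not part of the diff: the same envelope can be posted straight to a Teams incoming webhook to preview the card (the webhook URL below is a hypothetical placeholder):

import requests

card = {"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
        "type": "AdaptiveCard", "version": "1.5",
        "body": [{"type": "TextBlock", "text": "Note for session 123",
                  "style": "heading", "size": "Large"}]}
requests.post("https://example.webhook.office.com/webhookb2/<id>",  # hypothetical
              json={"type": "message",
                    "attachments": [{"contentType": "application/vnd.microsoft.card.adaptive",
                                     "contentUrl": None,
                                     "content": card}]})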

View file

@ -1,6 +1,7 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"
from chalicelib.utils import sql_helper as sh
import schemas
from chalicelib.core import events, metadata, sessions
@ -49,33 +50,33 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
continue
f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
# filter_args = _multiple_values(f["value"])
op = sessions.__get_sql_operator(f["operator"])
op = sh.get_sql_operator(f["operator"])
filter_type = f["type"]
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
f_k = f"f_value{i}"
values = {**values,
**sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
**sh.multi_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f["value"]) > 0 and f["value"][0] is not None:
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
@ -85,36 +86,36 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values["maxDuration"] = f["value"][1]
elif filter_type == schemas.FilterType.referrer:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
# op = sessions.__get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
first_stage_extra_constraints.append(
sessions._multiple_conditions(
sh.multi_conditions(
f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
sh.multi_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
@ -124,7 +125,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
if not isinstance(s["value"], list):
s["value"] = [s["value"]]
is_any = sessions._isAny_opreator(s["operator"])
is_any = sh.isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
i += 1
@ -132,41 +133,41 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
op = sh.get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
next_table = events.event_type.CLICK.table
next_col_name = events.event_type.CLICK.column
elif event_type == events.event_type.INPUT.ui_type:
next_table = events.event_type.INPUT.table
next_col_name = events.event_type.INPUT.column
elif event_type == events.event_type.LOCATION.ui_type:
next_table = events.event_type.LOCATION.table
next_col_name = events.event_type.LOCATION.column
elif event_type == events.event_type.CUSTOM.ui_type:
next_table = events.event_type.CUSTOM.table
next_col_name = events.event_type.CUSTOM.column
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
# IOS --------------
elif event_type == events.event_type.CLICK_IOS.ui_type:
next_table = events.event_type.CLICK_IOS.table
next_col_name = events.event_type.CLICK_IOS.column
elif event_type == events.event_type.INPUT_IOS.ui_type:
next_table = events.event_type.INPUT_IOS.table
next_col_name = events.event_type.INPUT_IOS.column
elif event_type == events.event_type.VIEW_IOS.ui_type:
next_table = events.event_type.VIEW_IOS.table
next_col_name = events.event_type.VIEW_IOS.column
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
next_table = events.event_type.CUSTOM_IOS.table
next_col_name = events.event_type.CUSTOM_IOS.column
elif event_type == events.EventType.CLICK_IOS.ui_type:
next_table = events.EventType.CLICK_IOS.table
next_col_name = events.EventType.CLICK_IOS.column
elif event_type == events.EventType.INPUT_IOS.ui_type:
next_table = events.EventType.INPUT_IOS.table
next_col_name = events.EventType.INPUT_IOS.column
elif event_type == events.EventType.VIEW_IOS.ui_type:
next_table = events.EventType.VIEW_IOS.table
next_col_name = events.EventType.VIEW_IOS.column
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
print("=================UNDEFINED")
continue
values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
if sessions.__is_negation_operator(op) and i > 0:
op = sessions.__reverse_sql_operator(op)
values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
if sh.is_negation_operator(op) and i > 0:
op = sh.reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
FROM {next_table} AS s_main
@ -177,8 +178,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
if is_any:
main_condition = "TRUE"
else:
main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@ -319,7 +320,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues,
transitions ::: if transitioned from the first stage to the last - 1
else - 0
errors ::: a dictionary where the keys are all unique issues (currently context-wise)
the values are lists
if an issue happened between the first stage and the last - 1
else - 0

View file

@ -1,21 +0,0 @@
from datetime import datetime
from decouple import config
from chalicelib.core.collaboration_slack import Slack
def send_batch(notifications_list):
if notifications_list is None or len(notifications_list) == 0:
return
webhookId_map = {}
for n in notifications_list:
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \
+ f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
"title": n["notification"]["title"],
"title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()})
for batch in webhookId_map.keys():
Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
attachments=webhookId_map[batch]["batch"])

View file

@ -1,4 +1,3 @@
import hashlib
from urllib.parse import urlparse
import requests
@ -8,17 +7,11 @@ from chalicelib.core import sourcemaps_parser
from chalicelib.utils import s3
def __get_key(project_id, url):
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path
return f"{project_id}/{hashlib.md5(new_url.encode()).hexdigest()}"
def presign_share_urls(project_id, urls):
results = []
for u in urls:
results.append(s3.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120,
key=__get_key(project_id, u),
key=s3.generate_file_key_from_url(project_id, u),
check_exists=True))
return results
@ -28,7 +21,7 @@ def presign_upload_urls(project_id, urls):
for u in urls:
results.append(s3.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'),
expires_in=1800,
key=__get_key(project_id, u)))
key=s3.generate_file_key_from_url(project_id, u)))
return results
@ -94,7 +87,7 @@ def get_traces_group(project_id, payload):
file_exists_in_bucket = False
file_exists_in_server = False
file_url = u["absPath"]
key = __get_key(project_id, file_url) # use filename instead?
key = s3.generate_file_key_from_url(project_id, file_url) # use filename instead?
params_idx = file_url.find("?")
if file_url and len(file_url) > 0 \
and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"):
@ -185,7 +178,7 @@ def fetch_missed_contexts(frames):
line = lines[l]
offset = c - MAX_COLUMN_OFFSET
if offset < 0: # if the line is shirt
if offset < 0: # if the line is short
offset = 0
frames[i]["context"].append([frames[i]["lineNo"], line[offset: c + MAX_COLUMN_OFFSET + 1]])
return frames

View file

@ -602,12 +602,12 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
def change_jwt_iat(user_id):
@ -648,9 +648,9 @@ def authenticate(email, password, for_change_password=False):
return True
r = helper.dict_to_camel_case(r)
jwt_iat = change_jwt_iat(r['userId'])
iat = TimeUTC.datetime_to_timestamp(jwt_iat)
return {
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(jwt_iat),
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], iat=iat,
aud=f"front:{helper.get_stage_name()}"),
"email": email,
**r

View file

@ -12,7 +12,7 @@ def get_by_id(webhook_id):
cur.mogrify("""\
SELECT w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
)
w = helper.dict_to_camel_case(cur.fetchone())
@ -21,15 +21,14 @@ def get_by_id(webhook_id):
return w
def get(tenant_id, webhook_id):
def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
cur.mogrify("""SELECT w.*
FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s
AND deleted_at ISNULL AND type=%(webhook_type)s;""",
{"webhook_id": webhook_id, "webhook_type": webhook_type})
)
w = helper.dict_to_camel_case(cur.fetchone())
if w:
@ -40,11 +39,9 @@ def get(tenant_id, webhook_id):
def get_by_type(tenant_id, webhook_type):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
cur.mogrify("""SELECT w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
{"type": webhook_type})
)
webhooks = helper.list_to_camel_case(cur.fetchall())
@ -55,22 +52,12 @@ def get_by_type(tenant_id, webhook_type):
def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur:
cur.execute("""\
SELECT
webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;"""
)
cur.execute("""SELECT w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;""")
all = helper.list_to_camel_case(cur.fetchall())
if replace_none:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for k in w.keys():
if w[k] is None:
w[k] = ''
else:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return all
@ -83,7 +70,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE webhook_id =%(id)s AND deleted_at ISNULL
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
@ -100,7 +87,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
query = cur.mogrify("""\
INSERT INTO public.webhooks(endpoint,auth_header,type,name)
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(

View file

@ -0,0 +1,14 @@
from chalicelib.core import sourcemaps
def format_first_stack_frame(error):
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
for s in error["stack"]:
for c in s.get("context", []):
for sci, sc in enumerate(c):
if isinstance(sc, str) and len(sc) > 1000:
c[sci] = sc[:1000]
# convert bytes to string:
if isinstance(s["filename"], bytes):
s["filename"] = s["filename"].decode("utf-8")
return error
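
Illustrative behaviour of the new helper (sample values are made up):

# frame = {"filename": b"bundle.js", "context": [[41, "x" * 5000]]}
# after format_first_stack_frame runs over an error whose stack contains
# this frame, frame["filename"] == "bundle.js" and the context string is
# truncated in place to its first 1000 characters.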

View file

@ -3,6 +3,7 @@ import random
import re
import string
from typing import Union
from urllib.parse import urlparse
from decouple import config
@ -98,7 +99,7 @@ TRACK_TIME = True
def allow_captcha():
return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \
and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
def string_to_sql_like(value):
@ -304,3 +305,10 @@ def __time_value(row):
def is_saml2_available():
return config("hastSAML2", default=False, cast=bool)
def get_domain():
_url = config("SITE_URL")
if not _url.startswith("http"):
_url = "http://" + _url
return '.'.join(urlparse(_url).netloc.split(".")[-2:])
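
Illustrative only (the SITE_URL value is hypothetical); later in this diff the login endpoint uses this to scope the JWT cookie to the parent domain:

# SITE_URL = "https://app.mycompany.com"
# urlparse(_url).netloc -> "app.mycompany.com"
# get_domain()          -> "mycompany.com"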

View file

@ -1,9 +1,13 @@
from botocore.exceptions import ClientError
from decouple import config
import hashlib
from datetime import datetime, timedelta
from urllib.parse import urlparse
import boto3
import botocore
from botocore.client import Config
from botocore.exceptions import ClientError
from decouple import config
from requests.models import PreparedRequest
if not config("S3_HOST", default=False):
client = boto3.client('s3')
@ -51,7 +55,7 @@ def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
)
def get_presigned_url_for_upload(bucket, expires_in, key):
def get_presigned_url_for_upload_deprecated(bucket, expires_in, key, **args):
return client.generate_presigned_url(
'put_object',
Params={
@ -62,6 +66,28 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
)
def get_presigned_url_for_upload(bucket, expires_in, key, conditions=None, public=False, content_type=None):
acl = 'private'
if public:
acl = 'public-read'
fields = {"acl": acl}
if content_type:
fields["Content-Type"] = content_type
url_parts = client.generate_presigned_post(
Bucket=bucket,
Key=key,
ExpiresIn=expires_in,
Fields=fields,
Conditions=conditions,
)
req = PreparedRequest()
req.prepare_url(f"{url_parts['url']}/{url_parts['fields']['key']}", url_parts['fields'])
return req.url
def get_file(source_bucket, source_key):
try:
result = client.get_object(
@ -88,3 +114,13 @@ def schedule_for_deletion(bucket, key):
s3_object.copy_from(CopySource={'Bucket': bucket, 'Key': key},
Expires=datetime.now() + timedelta(days=7),
MetadataDirective='REPLACE')
def generate_file_key(project_id, key):
return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}"
def generate_file_key_from_url(project_id, url):
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path
return generate_file_key(project_id=project_id, key=new_url)
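
A quick sketch, not part of the diff, of what the relocated key helper yields (the URL is illustrative):

key = generate_file_key_from_url(42, "https://cdn.example.com/static/app.js?v=7")
# the query string is dropped before hashing, so this equals
# f"42/{hashlib.md5(b'https://cdn.example.com/static/app.js').hexdigest()}"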

View file

@ -0,0 +1,53 @@
from typing import Union
import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def multi_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def multi_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
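
A short usage sketch for the module above (values illustrative). Note that multi_conditions does a plain substring replace of value_key, so keys like "value" or "f_value0" are chosen not to collide with column names:

multi_values(["chrome", "firefox"])
# -> {"value_0": "chrome", "value_1": "firefox"}
multi_conditions("s.user_browser = %(value)s", ["chrome", "firefox"])
# -> "(s.user_browser = %(value_0)s OR s.user_browser = %(value_1)s)"
multi_conditions("s.user_browser != %(value)s", ["chrome", "firefox"], is_not=True)
# -> "(s.user_browser != %(value_0)s AND s.user_browser != %(value_1)s)"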

View file

@ -1,6 +0,0 @@
#!/bin/bash
cd utilities
nohup npm start &> /tmp/utilities.log &
cd ..
python env_handler.py
chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}

View file

@ -50,4 +50,5 @@ DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=144000
ASSIST_JWT_SECRET=
PYTHONUNBUFFERED=1
PYTHONUNBUFFERED=1
THUMBNAILS_BUCKET=thumbnails

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.0
jira==3.4.1
fastapi==0.87.0
fastapi==0.89.1
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.0
jira==3.4.1
fastapi==0.87.0
fastapi==0.89.1
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1

View file

@ -1,7 +1,8 @@
from typing import Union
from decouple import config
from fastapi import Depends, Body, HTTPException
from fastapi import Depends, Body, HTTPException, Response
from fastapi.responses import JSONResponse
from starlette import status
import schemas
@ -12,6 +13,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
assist, mobile, signup, tenants, boarding, notifications, webhook, users, \
custom_metrics, saved_search, integrations_global
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import helper, captcha
from or_dependencies import OR_context
@ -39,13 +41,24 @@ def login(data: schemas.UserLoginSchema = Body(...)):
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
r["smtp"] = helper.has_smtp()
return {
content = {
'jwt': r.pop('jwt'),
'data': {
"user": r
}
}
response = JSONResponse(content=content)
response.set_cookie(key="jwt", value=content['jwt'], domain=helper.get_domain(),
expires=config("JWT_EXPIRATION", cast=int))
return response
@app.get('/logout', tags=["login", "logout"])
def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
response.delete_cookie("jwt")
return {"data": "success"}
@app.post('/{projectId}/sessions/search', tags=["sessions"])
@ -67,8 +80,8 @@ def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSc
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType, str] = None,
schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType, str] = None,
key: str = None, source: str = None, live: bool = False,
context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0:
@ -104,21 +117,27 @@ def get_integrations_status(projectId: int, context: schemas.CurrentContext = De
return {"data": data}
@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
@app.post('/{projectId}/integrations/{integration}/notify/{webhookId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str,
data: schemas.IntegrationNotificationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
comment = None
if data.comment:
comment = data.comment
if integration == "slack":
args = {"tenant_id": context.tenant_id,
"user": context.email, "comment": comment, "project_id": projectId,
"integration_id": integrationId}
args = {"tenant_id": context.tenant_id,
"user": context.email, "comment": comment, "project_id": projectId,
"integration_id": webhookId}
if integration == schemas.WebhookType.slack:
if source == "sessions":
return Slack.share_session(session_id=sourceId, **args)
elif source == "errors":
return Slack.share_error(error_id=sourceId, **args)
elif integration == schemas.WebhookType.msteams:
if source == "sessions":
return MSTeams.share_session(session_id=sourceId, **args)
elif source == "errors":
return MSTeams.share_error(error_id=sourceId, **args)
return {"data": None}
@ -786,6 +805,15 @@ def create_project(data: schemas.CreateProjectSchema = Body(...),
return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.put('/projects/{projectId}', tags=['projects'])
def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -863,17 +891,18 @@ def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR
@app.get('/integrations/slack/channels', tags=["integrations"])
def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type='slack')}
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.slack)}
@app.get('/integrations/slack/{integrationId}', tags=["integrations"])
def get_slack_webhook(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)}
@app.get('/integrations/slack/{webhookId}', tags=["integrations"])
def get_slack_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_webhook(tenant_id=context.tenant_id, webhook_id=webhookId,
webhook_type=schemas.WebhookType.slack)}
@app.delete('/integrations/slack/{integrationId}', tags=["integrations"])
def delete_slack_integration(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(context.tenant_id, integrationId)
@app.delete('/integrations/slack/{webhookId}', tags=["integrations"])
def delete_slack_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(context.tenant_id, webhookId)
@app.put('/webhooks', tags=["webhooks"])
@ -957,6 +986,44 @@ def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
}
@app.get('/integrations/msteams/channels', tags=["integrations"])
def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.msteams)}
@app.post('/integrations/msteams', tags=['integrations'])
def add_msteams_integration(data: schemas.AddCollaborationSchema,
context: schemas.CurrentContext = Depends(OR_context)):
n = MSTeams.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
"errors": [
"We couldn't send you a test message on your Microsoft Teams channel. Please verify your webhook url."]
}
return {"data": n}
@app.post('/integrations/msteams/{webhookId}', tags=['integrations'])
def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = webhook.get_webhook(tenant_id=context.tenant_id, webhook_id=webhookId,
webhook_type=schemas.WebhookType.msteams)
if old["endpoint"] != data.url:
if not MSTeams.say_hello(data.url):
return {
"errors": [
"We couldn't send you a test message on your Microsoft Teams channel. Please verify your webhook url."]
}
return {"data": webhook.update(tenant_id=context.tenant_id, webhook_id=webhookId,
changes={"name": data.name, "endpoint": data.url})}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@public_app.get('/', tags=["health"])
@public_app.post('/', tags=["health"])
@public_app.put('/', tags=["health"])

View file

@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse
import schemas
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
sessions_favorite, assist, sessions_notes
sessions_favorite, assist, sessions_notes, click_maps
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
@ -60,19 +60,10 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
}}
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
@ -81,10 +72,10 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)
old = webhook.get_webhook(tenant_id=context.tenant_id, webhook_id=integrationId)
if old["endpoint"] != data.url:
if not Slack.say_hello(data.url):
return {
@ -153,11 +144,6 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
@ -310,7 +296,7 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())}
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
@ -408,6 +394,13 @@ def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -416,3 +409,9 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
if "errors" in data:
return data
return {'data': data}
@app.post('/{projectId}/click_maps/search', tags=["click maps"])
def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}


@ -1,3 +1,6 @@
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from chalicelib.core import telemetry
from chalicelib.core import weekly_report, jobs
@ -15,7 +18,10 @@ async def telemetry_cron() -> None:
cron_jobs = [
{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
{"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5, "misfire_grace_time": 60 * 60}
{"func": telemetry_cron, "trigger": CronTrigger(day_of_week="*"),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": run_scheduled_jobs, "trigger": IntervalTrigger(minutes=1),
"misfire_grace_time": 20, "max_instances": 1},
{"func": weekly_report2, "trigger": CronTrigger(day_of_week="mon", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1}
]
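A minimal sketch of how a job list in this shape can be registered, assuming APScheduler 3.x inside an asyncio application (not taken from this commit):

from apscheduler.schedulers.asyncio import AsyncIOScheduler

def start_scheduler(jobs: list) -> AsyncIOScheduler:
    scheduler = AsyncIOScheduler()
    for job in jobs:
        # Each dict carries the callable, its trigger object, and the
        # keyword options (misfire_grace_time, max_instances).
        scheduler.add_job(**job)
    scheduler.start()  # requires a running asyncio event loop
    return scheduler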


@ -1,398 +0,0 @@
from fastapi import Body
import schemas
from chalicelib.core import metrics
from chalicelib.core import metadata
from chalicelib.utils import helper
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"])
def get_metadata_map(projectId: int):
metamap = []
for m in metadata.get(project_id=projectId):
metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
return {"data": metamap}
@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_application_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_page_metrics(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_user_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_performance(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_images(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_missing_resources_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_network(project_id=projectId, **data.dict())}
@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"])
def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None,
key: str = ""):
if q is None or len(q) == 0:
return {"data": []}
q = '^' + q
if widget in ['performance']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=True)
elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
'impacted_sessions_by_slow_pages', 'pages_response_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, pages_only=True)
elif widget in ['resources_loading_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False)
elif widget in ['time_between_events', 'events']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False, events_only=True)
elif widget in ['metadata']:
data = metrics.search(q, None, project_id=projectId,
platform=platform, metadata=True, key=key)
else:
return {"errors": [f"unsupported widget: {widget}"]}
return {'data': data}
# 1
@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_resources(project_id=projectId, **data.dict())}
# 2
@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_loading_time(project_id=projectId, **data.dict())}
# 3
@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())}
# 4
@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_busiest_time_of_day(project_id=projectId, **data.dict())}
# 5
@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_location(project_id=projectId, **data.dict())}
# 6
@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_speed_index_location(project_id=projectId, **data.dict())}
# 7
@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())}
# 8
@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time_distribution(project_id=projectId, **data.dict())}
# 9
@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_top_metrics(project_id=projectId, **data.dict())}
# 10
@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_time_to_render(project_id=projectId, **data.dict())}
# 11
@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())}
# 12
@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())}
# 12.1
@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
# 12.2
@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())}
# 13
@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
def get_dashboard_crashes(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_crashes(project_id=projectId, **data.dict())}
# 14
@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors(project_id=projectId, **data.dict())}
# 14.1
@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_4xx(project_id=projectId, **data.dict())}
# 14.2
@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_5xx(project_id=projectId, **data.dict())}
# 15
@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_domains(project_id=projectId, **data.dict())}
# 16
@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_domains(project_id=projectId, **data.dict())}
# 17
@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_per_browser(project_id=projectId, **data.dict())}
# 18
@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors(project_id=projectId, **data.dict())}
# 18.1
@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_4xx(project_id=projectId, **data.dict())}
# 18.2
@app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_5xx(project_id=projectId, **data.dict())}
# 19
@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_type(project_id=projectId, **data.dict())}
# 20
@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_by_party(project_id=projectId, **data.dict())}
# 21
@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.resource_type_vs_response_end(project_id=projectId, **data.dict())}
# 22
@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
# 23
@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
# 24
@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_count_by_type(project_id=projectId, **data.dict())}
# # 25
# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
# return {"errors": ["please choose 2 events"]}
@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": "count_sessions",
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
*helper.explode_widget(data={**metrics.get_application_activity(project_id=projectId, **data.dict()),
"chart": metrics.get_performance(project_id=projectId, **data.dict())
.get("chart", [])}),
*helper.explode_widget(data=metrics.get_page_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=metrics.get_user_activity(project_id=projectId, **data.dict())),
{"key": "avg_pages_dom_buildtime",
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": "avg_pages_response_time",
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())
},
*helper.explode_widget(metrics.get_top_metrics(project_id=projectId, **data.dict())),
{"key": "avg_time_to_render", "data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": "avg_used_js_heap_size", "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": "avg_cpu", "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}
@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
def get_dashboard_overview2(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": schemas.TemplatePredefinedKeys.count_sessions,
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_image_load_time,
"data": metrics.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_page_load_time,
"data": metrics.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_request_load_time,
"data": metrics.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start,
"data": metrics.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel,
"data": metrics.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_visited_pages,
"data": metrics.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_session_duration,
"data": metrics.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime,
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_response_time,
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_response_time,
"data": metrics.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_paint,
"data": metrics.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded,
"data": metrics.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_till_first_bit,
"data": metrics.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive,
"data": metrics.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.count_requests,
"data": metrics.get_top_metrics_count_requests(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_render,
"data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size,
"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_cpu,
"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}


@ -1,3 +1,5 @@
from typing import Union
from fastapi import Body, Depends
import schemas
@ -46,11 +48,12 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
@app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
def add_widget_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def add_card_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
data=data)}
@ -58,7 +61,7 @@ def add_widget_to_dashboard(projectId: int, dashboardId: int,
@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
data: schemas.CreateCustomMetricsSchema = Body(...),
data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
dashboard_id=dashboardId, data=data)}
@ -80,43 +83,41 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
widget_id=widgetId)
@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId, data=data)
if data is None:
return {"errors": ["widget not found"]}
return {"data": data}
@app.get('/{projectId}/metrics/templates', tags=["dashboard"])
def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.get_templates(project_id=projectId, user_id=context.user_id)}
# @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
# def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
# data: schemas.CardChartSchema = Body(...),
# context: schemas.CurrentContext = Depends(OR_context)):
# data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
# widget_id=widgetId, data=data)
# if data is None:
# return {"errors": ["widget not found"]}
# return {"data": data}
@app.post('/{projectId}/cards/try', tags=["cards"])
@app.post('/{projectId}/metrics/try', tags=["dashboard"])
@app.put('/{projectId}/metrics/try', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card(projectId: int, data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/cards/try/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data}
@app.post('/{projectId}/cards/try/issues', tags=["cards"])
@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.series) == 0:
return {"data": []}
data.series[0].filter.startDate = data.startTimestamp
@ -125,46 +126,72 @@ def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSe
return {"data": data}
@app.get('/{projectId}/cards', tags=["cards"])
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards', tags=["cards"])
@app.post('/{projectId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/metrics', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def create_card(projectId: int, data: schemas.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards/search', tags=["cards"])
@app.post('/{projectId}/metrics/search', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def get_custom_metric(projectId: int, metric_id: str, context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": ["invalid card_id"]}
data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
if data is None:
return {"errors": ["custom metric not found"]}
return {"errors": ["card not found"]}
return {"data": data}
# @app.get('/{projectId}/cards/{metric_id}/thumbnail', tags=["cards"])
# def sign_thumbnail_for_upload(projectId: int, metric_id: Union[int, str],
# context: schemas.CurrentContext = Depends(OR_context)):
# if not isinstance(metric_id, int):
# return {"errors": ["invalid card_id"]}
# return custom_metrics.add_thumbnail(metric_id=metric_id, user_id=context.user_id, project_id=projectId)
@app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
def get_custom_metric_sessions(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_sessions(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": [f"invalid card_id: {metric_id}"]}
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
@ -172,10 +199,11 @@ def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, issue_id=issueId, data=data)
@ -184,10 +212,11 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
def get_custom_metric_errors_list(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
@ -196,22 +225,22 @@ def get_custom_metric_errors_list(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
def get_card_chart(projectId: int, metric_id: int, data: schemas.CardChartSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
@ -219,6 +248,7 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCus
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
@ -231,6 +261,7 @@ def update_custom_metric_state(projectId: int, metric_id: int,
status=data.active)}
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):


@ -30,7 +30,7 @@ def get_session_events(projectKey: str, sessionId: int):
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {
'data': events.get_by_sessionId2_pg(
'data': events.get_by_session_id(
project_id=projectId,
session_id=sessionId
)


@ -78,14 +78,13 @@ class CurrentContext(CurrentAPIContext):
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
class AddCollaborationSchema(BaseModel):
name: str = Field(...)
url: HttpUrl = Field(...)
class EditSlackSchema(BaseModel):
class EditCollaborationSchema(AddCollaborationSchema):
name: Optional[str] = Field(None)
url: HttpUrl = Field(...)
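A quick validation check of the collaboration schemas above (pydantic v1 style, matching this module; the webhook URL is illustrative):

from pydantic import ValidationError

ok = AddCollaborationSchema(name="alerts",
                            url="https://hooks.slack.com/services/T000/B000/XXX")
try:
    AddCollaborationSchema(name="alerts", url="not-a-url")
except ValidationError as e:
    print(e.errors()[0]["loc"])  # ('url',) -- HttpUrl rejects malformed values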
class CreateNotificationSchema(BaseModel):
@ -177,12 +176,6 @@ class WeeklyReportConfigSchema(BaseModel):
alias_generator = attribute_to_camel_case
class GetHeatmapPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-30))
endDate: int = Field(TimeUTC.now())
url: str = Field(...)
class DatadogSchema(BaseModel):
apiKey: str = Field(...)
applicationKey: str = Field(...)
@ -378,59 +371,59 @@ class ErrorSource(str, Enum):
class EventType(str, Enum):
click = "CLICK"
input = "INPUT"
location = "LOCATION"
custom = "CUSTOM"
request = "REQUEST"
request_details = "FETCH"
graphql = "GRAPHQL"
state_action = "STATEACTION"
error = "ERROR"
click_ios = "CLICK_IOS"
input_ios = "INPUT_IOS"
view_ios = "VIEW_IOS"
custom_ios = "CUSTOM_IOS"
request_ios = "REQUEST_IOS"
error_ios = "ERROR_IOS"
click = "click"
input = "input"
location = "location"
custom = "custom"
request = "request"
request_details = "fetch"
graphql = "graphql"
state_action = "stateAction"
error = "error"
click_ios = "clickIos"
input_ios = "inputIos"
view_ios = "viewIos"
custom_ios = "customIos"
request_ios = "requestIos"
error_ios = "errorIos"
class PerformanceEventType(str, Enum):
location_dom_complete = "DOM_COMPLETE"
location_largest_contentful_paint_time = "LARGEST_CONTENTFUL_PAINT_TIME"
time_between_events = "TIME_BETWEEN_EVENTS"
location_ttfb = "TTFB"
location_avg_cpu_load = "AVG_CPU_LOAD"
location_avg_memory_usage = "AVG_MEMORY_USAGE"
fetch_failed = "FETCH_FAILED"
location_dom_complete = "domComplete"
location_largest_contentful_paint_time = "largestContentfulPaintTime"
time_between_events = "timeBetweenEvents"
location_ttfb = "ttfb"
location_avg_cpu_load = "avgCpuLoad"
location_avg_memory_usage = "avgMemoryUsage"
fetch_failed = "fetchFailed"
# fetch_duration = "FETCH_DURATION"
class FilterType(str, Enum):
user_os = "USEROS"
user_browser = "USERBROWSER"
user_device = "USERDEVICE"
user_country = "USERCOUNTRY"
user_id = "USERID"
user_anonymous_id = "USERANONYMOUSID"
referrer = "REFERRER"
rev_id = "REVID"
user_os = "userOs"
user_browser = "userBrowser"
user_device = "userDevice"
user_country = "userCountry"
user_id = "userId"
user_anonymous_id = "userAnonymousId"
referrer = "referrer"
rev_id = "revId"
# IOS
user_os_ios = "USEROS_IOS"
user_device_ios = "USERDEVICE_IOS"
user_country_ios = "USERCOUNTRY_IOS"
user_id_ios = "USERID_IOS"
user_anonymous_id_ios = "USERANONYMOUSID_IOS"
rev_id_ios = "REVID_IOS"
user_os_ios = "userOsIos"
user_device_ios = "userDeviceIos"
user_country_ios = "userCountryIos"
user_id_ios = "userIdIos"
user_anonymous_id_ios = "userAnonymousIdIos"
rev_id_ios = "revIdIos"
#
duration = "DURATION"
platform = "PLATFORM"
metadata = "METADATA"
issue = "ISSUE"
events_count = "EVENTS_COUNT"
utm_source = "UTM_SOURCE"
utm_medium = "UTM_MEDIUM"
utm_campaign = "UTM_CAMPAIGN"
duration = "duration"
platform = "platform"
metadata = "metadata"
issue = "issue"
events_count = "eventsCount"
utm_source = "utmSource"
utm_medium = "utmMedium"
utm_campaign = "utmCampaign"
class SearchEventOperator(str, Enum):
@ -447,6 +440,15 @@ class SearchEventOperator(str, Enum):
_ends_with = "endsWith"
class ClickEventExtraOperator(str, Enum):
_on_selector = "onSelector"
_on_text = "onText"
class IssueFilterOperator(str, Enum):
_on_selector = ClickEventExtraOperator._on_selector.value
class PlatformType(str, Enum):
mobile = "mobile"
desktop = "desktop"
@ -507,19 +509,23 @@ class HttpMethod(str, Enum):
class FetchFilterType(str, Enum):
_url = "FETCH_URL"
_status_code = "FETCH_STATUS_CODE"
_method = "FETCH_METHOD"
_duration = "FETCH_DURATION"
_request_body = "FETCH_REQUEST_BODY"
_response_body = "FETCH_RESPONSE_BODY"
_url = "fetchUrl" # FETCH_URL
_status_code = "fetchStatusCode" # FETCH_STATUS_CODE
_method = "fetchMethod" # FETCH_METHOD
_duration = "fetchDuration" # FETCH_DURATION
_request_body = "fetchRequestBody" # FETCH_REQUEST_BODY
_response_body = "fetchResponseBody" # FETCH_RESPONSE_BODY
class GraphqlFilterType(str, Enum):
_name = "GRAPHQL_NAME"
_method = "GRAPHQL_METHOD"
_request_body = "GRAPHQL_REQUEST_BODY"
_response_body = "GRAPHQL_RESPONSE_BODY"
_name = "graphqlName" # GRAPHQL_NAME
_method = "graphqlMethod" # GRAPHQL_METHOD
_request_body = "graphqlRequestBody" # GRAPHQL_REQUEST_BODY
_response_body = "graphqlResponseBody" # GRAPHQL_RESPONSE_BODY
class IssueFilterType(str, Enum):
_selector = "CLICK_SELECTOR"
class RequestGraphqlFilterSchema(BaseModel):
@ -528,14 +534,48 @@ class RequestGraphqlFilterSchema(BaseModel):
operator: Union[SearchEventOperator, MathOperator] = Field(...)
class IssueFilterSchema(BaseModel):
type: IssueFilterType = Field(...)
value: List[str] = Field(...)
operator: IssueFilterOperator = Field(...)
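A hypothetical instance of the new issue filter, using the enum values defined above ("CLICK_SELECTOR" / "onSelector"; the selector string itself is illustrative):

issue_filter = IssueFilterSchema(type="CLICK_SELECTOR",
                                 value=["#checkout-button"],
                                 operator="onSelector")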
class _SessionSearchEventRaw(__MixedSearchFilter):
is_event: bool = Field(default=True, const=True)
value: List[str] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...)
operator: SearchEventOperator = Field(...)
source: Optional[List[Union[ErrorSource, int, str]]] = Field(None)
sourceOperator: Optional[MathOperator] = Field(None)
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(None)
operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...)
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
sourceOperator: Optional[MathOperator] = Field(default=None)
filters: Optional[List[Union[RequestGraphqlFilterSchema, IssueFilterSchema]]] = Field(default=None)
@root_validator(pre=True)
def transform(cls, values):
values["type"] = {
"CLICK": EventType.click.value,
"INPUT": EventType.input.value,
"LOCATION": EventType.location.value,
"CUSTOM": EventType.custom.value,
"REQUEST": EventType.request.value,
"FETCH": EventType.request_details.value,
"GRAPHQL": EventType.graphql.value,
"STATEACTION": EventType.state_action.value,
"ERROR": EventType.error.value,
"CLICK_IOS": EventType.click_ios.value,
"INPUT_IOS": EventType.input_ios.value,
"VIEW_IOS": EventType.view_ios.value,
"CUSTOM_IOS": EventType.custom_ios.value,
"REQUEST_IOS": EventType.request_ios.value,
"ERROR_IOS": EventType.error_ios.value,
"DOM_COMPLETE": PerformanceEventType.location_dom_complete.value,
"LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.location_largest_contentful_paint_time.value,
"TIME_BETWEEN_EVENTS": PerformanceEventType.time_between_events.value,
"TTFB": PerformanceEventType.location_ttfb.value,
"AVG_CPU_LOAD": PerformanceEventType.location_avg_cpu_load.value,
"AVG_MEMORY_USAGE": PerformanceEventType.location_avg_memory_usage.value,
"FETCH_FAILED": PerformanceEventType.fetch_failed.value,
}.get(values["type"], values["type"])
return values
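The pre-validator above keeps the old uppercase constants accepted by the API working against the new camelCase enum values; the same normalization idea as a standalone sketch (mapping trimmed for brevity):

LEGACY_EVENT_TYPES = {"CLICK": "click", "FETCH": "fetch", "STATEACTION": "stateAction"}

def normalize_event_type(value: str) -> str:
    # Unknown or already-normalized values pass through unchanged,
    # mirroring the .get(values["type"], values["type"]) fallback above.
    return LEGACY_EVENT_TYPES.get(value, value)

assert normalize_event_type("CLICK") == "click"
assert normalize_event_type("click") == "click"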
@root_validator
def event_validator(cls, values):
@ -548,7 +588,7 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
assert values.get("sourceOperator") is not None, \
"sourceOperator should not be null for PerformanceEventType"
if values["type"] == PerformanceEventType.time_between_events:
assert values["sourceOperator"] != MathOperator._equal.value, \
assert values["sourceOperator"] != MathOperator._equal, \
f"{MathOperator._equal} is not allowed for duration of {PerformanceEventType.time_between_events}"
assert len(values.get("value", [])) == 2, \
f"must provide 2 Events as value for {PerformanceEventType.time_between_events}"
@ -566,11 +606,14 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
values["source"] = [ErrorSource.js_exception]
elif values.get("type") == EventType.request_details:
assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \
f"filters should be defined for {EventType.request_details.value}"
f"filters should be defined for {EventType.request_details}"
elif values.get("type") == EventType.graphql:
assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \
f"filters should be defined for {EventType.graphql.value}"
f"filters should be defined for {EventType.graphql}"
if isinstance(values.get("operator"), ClickEventExtraOperator):
assert values.get("type") == EventType.click, \
f"operator:{values['operator']} is only available for event-type: {EventType.click}"
return values
@ -580,11 +623,42 @@ class _SessionSearchEventSchema(_SessionSearchEventRaw):
class SessionSearchFilterSchema(__MixedSearchFilter):
is_event: bool = Field(False, const=False)
value: Union[Optional[Union[IssueType, PlatformType, int, str]],
Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
# TODO: remove this if nothing is broken on the UI side
# value: Union[Optional[Union[IssueType, PlatformType, int, str]],
# Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
value: List[Union[IssueType, PlatformType, int, str]] = Field(default=[])
type: FilterType = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
source: Optional[Union[ErrorSource, str]] = Field(default=None)
filters: List[IssueFilterSchema] = Field(default=[])
@root_validator(pre=True)
def transform(cls, values):
values["type"] = {
"USEROS": FilterType.user_os.value,
"USERBROWSER": FilterType.user_browser.value,
"USERDEVICE": FilterType.user_device.value,
"USERCOUNTRY": FilterType.user_country.value,
"USERID": FilterType.user_id.value,
"USERANONYMOUSID": FilterType.user_anonymous_id.value,
"REFERRER": FilterType.referrer.value,
"REVID": FilterType.rev_id.value,
"USEROS_IOS": FilterType.user_os_ios.value,
"USERDEVICE_IOS": FilterType.user_device_ios.value,
"USERCOUNTRY_IOS": FilterType.user_country_ios.value,
"USERID_IOS": FilterType.user_id_ios.value,
"USERANONYMOUSID_IOS": FilterType.user_anonymous_id_ios.value,
"REVID_IOS": FilterType.rev_id_ios.value,
"DURATION": FilterType.duration.value,
"PLATFORM": FilterType.platform.value,
"METADATA": FilterType.metadata.value,
"ISSUE": FilterType.issue.value,
"EVENTS_COUNT": FilterType.events_count.value,
"UTM_SOURCE": FilterType.utm_source.value,
"UTM_MEDIUM": FilterType.utm_medium.value,
"UTM_CAMPAIGN": FilterType.utm_campaign.value
}.get(values["type"], values["type"])
return values
@root_validator
def filter_validator(cls, values):
@ -632,7 +706,12 @@ class SessionsSearchPayloadSchema(_PaginatedSchema):
@root_validator(pre=True)
def transform_order(cls, values):
if values.get("order") is not None:
if values.get("sort") is None:
values["sort"] = "startTs"
if values.get("order") is None:
values["order"] = SortOrderType.desc
else:
values["order"] = values["order"].upper()
return values
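A standalone sketch of the ordering defaults introduced above, assuming "startTs" and descending order as the server defaults shown in the new lines:

def normalize_ordering(values: dict) -> dict:
    if values.get("sort") is None:
        values["sort"] = "startTs"
    if values.get("order") is None:
        values["order"] = "DESC"  # stands in for SortOrderType.desc
    else:
        values["order"] = values["order"].upper()
    return values

assert normalize_ordering({}) == {"sort": "startTs", "order": "DESC"}
assert normalize_ordering({"order": "asc"})["order"] == "ASC"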
@ -698,16 +777,16 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
class FunnelSchema(BaseModel):
name: str = Field(...)
filter: FunnelSearchPayloadSchema = Field([])
is_public: bool = Field(False)
is_public: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
class UpdateFunnelSchema(FunnelSchema):
name: Optional[str] = Field(None)
filter: Optional[FunnelSearchPayloadSchema] = Field(None)
is_public: Optional[bool] = Field(None)
name: Optional[str] = Field(default=None)
filter: Optional[FunnelSearchPayloadSchema] = Field(default=None)
is_public: Optional[bool] = Field(default=None)
class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
@ -764,19 +843,19 @@ class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CardSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(default=None)
endDate: Optional[int] = Field(default=None)
sort: Optional[str] = Field(default=None)
order: Optional[str] = Field(default=None)
group_by_user: Optional[bool] = Field(default=False, const=True)
class CustomMetricCreateSeriesSchema(BaseModel):
class CardCreateSeriesSchema(BaseModel):
series_id: Optional[int] = Field(None)
name: Optional[str] = Field(None)
index: Optional[int] = Field(None)
filter: Optional[CustomMetricSeriesFilterSchema] = Field([])
filter: Optional[CardSeriesFilterSchema] = Field([])
class Config:
alias_generator = attribute_to_camel_case
@ -793,114 +872,226 @@ class MetricTableViewType(str, Enum):
pie_chart = "pieChart"
class MetricOtherViewType(str, Enum):
other_chart = "chart"
list_chart = "list"
class MetricType(str, Enum):
timeseries = "timeseries"
table = "table"
predefined = "predefined"
funnel = "funnel"
errors = "errors"
performance = "performance"
resources = "resources"
web_vital = "webVitals"
pathAnalysis = "pathAnalysis"
retention = "retention"
stickiness = "stickiness"
click_map = "clickMap"
insights = "insights"
class TableMetricOfType(str, Enum):
class MetricOfErrors(str, Enum):
calls_errors = "callsErrors" # calls_errors
domains_errors_4xx = "domainsErrors4xx" # domains_errors_4xx
domains_errors_5xx = "domainsErrors5xx" # domains_errors_5xx
errors_per_domains = "errorsPerDomains" # errors_per_domains
errors_per_type = "errorsPerType" # errors_per_type
impacted_sessions_by_js_errors = "impactedSessionsByJsErrors" # impacted_sessions_by_js_errors
resources_by_party = "resourcesByParty" # resources_by_party
class MetricOfPerformance(str, Enum):
cpu = "cpu" # cpu
crashes = "crashes" # crashes
fps = "fps" # fps
impacted_sessions_by_slow_pages = "impactedSessionsBySlowPages" # impacted_sessions_by_slow_pages
memory_consumption = "memoryConsumption" # memory_consumption
pages_dom_buildtime = "pagesDomBuildtime" # pages_dom_buildtime
pages_response_time = "pagesResponseTime" # pages_response_time
pages_response_time_distribution = "pagesResponseTimeDistribution" # pages_response_time_distribution
resources_vs_visually_complete = "resourcesVsVisuallyComplete" # resources_vs_visually_complete
sessions_per_browser = "sessionsPerBrowser" # sessions_per_browser
slowest_domains = "slowestDomains" # slowest_domains
speed_location = "speedLocation" # speed_location
time_to_render = "timeToRender" # time_to_render
class MetricOfResources(str, Enum):
missing_resources = "missingResources" # missing_resources
resources_count_by_type = "resourcesCountByType" # resources_count_by_type
resources_loading_time = "resourcesLoadingTime" # resources_loading_time
resource_type_vs_response_end = "resourceTypeVsResponseEnd" # resource_type_vs_response_end
slowest_resources = "slowestResources" # slowest_resources
class MetricOfWebVitals(str, Enum):
avg_cpu = "avgCpu" # avg_cpu
avg_dom_content_loaded = "avgDomContentLoaded" # avg_dom_content_loaded
avg_dom_content_load_start = "avgDomContentLoadStart" # avg_dom_content_load_start
avg_first_contentful_pixel = "avgFirstContentfulPixel" # avg_first_contentful_pixel
avg_first_paint = "avgFirstPaint" # avg_first_paint
avg_fps = "avgFps" # avg_fps
avg_image_load_time = "avgImageLoadTime" # avg_image_load_time
avg_page_load_time = "avgPageLoadTime" # avg_page_load_time
avg_pages_dom_buildtime = "avgPagesDomBuildtime" # avg_pages_dom_buildtime
avg_pages_response_time = "avgPagesResponseTime" # avg_pages_response_time
avg_request_load_time = "avgRequestLoadTime" # avg_request_load_time
avg_response_time = "avgResponseTime" # avg_response_time
avg_session_duration = "avgSessionDuration" # avg_session_duration
avg_till_first_byte = "avgTillFirstByte" # avg_till_first_byte
avg_time_to_interactive = "avgTimeToInteractive" # avg_time_to_interactive
avg_time_to_render = "avgTimeToRender" # avg_time_to_render
avg_used_js_heap_size = "avgUsedJsHeapSize" # avg_used_js_heap_size
avg_visited_pages = "avgVisitedPages" # avg_visited_pages
count_requests = "countRequests" # count_requests
count_sessions = "countSessions" # count_sessions
class MetricOfTable(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
user_device = FilterType.user_device.value
user_country = FilterType.user_country.value
user_id = FilterType.user_id.value
issues = FilterType.issue.value
visited_url = EventType.location.value
sessions = "SESSIONS"
errors = IssueType.js_exception.value
visited_url = "location"
sessions = "sessions"
errors = "jsException"
class TimeseriesMetricOfType(str, Enum):
class MetricOfTimeseries(str, Enum):
session_count = "sessionCount"
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
class MetricOfClickMap(str, Enum):
click_map_url = "clickMapUrl"
class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema):
startTimestamp: int = Field(TimeUTC.now(-7))
endTimestamp: int = Field(TimeUTC.now())
series: Optional[List[CustomMetricCreateSeriesSchema]] = Field(default=None)
series: List[CardCreateSeriesSchema] = Field(default=[])
class Config:
alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema, _PaginatedSchema):
class CardChartSchema(CardSessionsSchema):
density: int = Field(7)
class Config:
alias_generator = attribute_to_camel_case
class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(...)
is_public: bool = Field(default=True)
view_type: Union[MetricTimeseriesViewType, MetricTableViewType] = Field(MetricTimeseriesViewType.line_chart)
metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
metric_value: List[IssueType] = Field([])
metric_format: Optional[MetricFormatType] = Field(None)
# metricFraction: float = Field(None, gt=0, lt=1)
# This is used to handle wrong values sent by the UI
@root_validator(pre=True)
def remove_metric_value(cls, values):
if values.get("metricType") == MetricType.timeseries \
or values.get("metricType") == MetricType.table \
and values.get("metricOf") != TableMetricOfType.issues:
values["metricValue"] = []
return values
@root_validator
def validator(cls, values):
if values.get("metric_type") == MetricType.table:
assert isinstance(values.get("view_type"), MetricTableViewType), \
f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table.value}"
assert isinstance(values.get("metric_of"), TableMetricOfType), \
f"metricOf must be of type {TableMetricOfType} for metricType:{MetricType.table.value}"
if values.get("metric_of") != TableMetricOfType.issues:
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{TableMetricOfType.issues.value}"
elif values.get("metric_type") == MetricType.timeseries:
assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \
f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries.value}"
assert isinstance(values.get("metric_of"), TimeseriesMetricOfType), \
f"metricOf must be of type {TimeseriesMetricOfType} for metricType:{MetricType.timeseries.value}"
return values
class Config:
alias_generator = attribute_to_camel_case
class CustomMetricsConfigSchema(BaseModel):
class CardConfigSchema(BaseModel):
col: Optional[int] = Field(...)
row: Optional[int] = Field(default=2)
position: Optional[int] = Field(default=0)
class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
config: CustomMetricsConfigSchema = Field(...)
class CreateCardSchema(CardChartSchema):
name: Optional[str] = Field(...)
is_public: bool = Field(default=True)
view_type: Union[MetricTimeseriesViewType, \
MetricTableViewType, MetricOtherViewType] = Field(...)
metric_type: MetricType = Field(...)
metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \
MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \
MetricOfClickMap] = Field(MetricOfTable.user_id)
metric_value: List[IssueType] = Field(default=[])
metric_format: Optional[MetricFormatType] = Field(default=None)
default_config: CardConfigSchema = Field(..., alias="config")
is_template: bool = Field(default=False)
thumbnail: Optional[str] = Field(default=None)
# This is used to handle wrong values sent by the UI
@root_validator(pre=True)
def transform_series(cls, values):
if values.get("series") is not None and len(values["series"]) > 1 and values.get(
"metric_type") == MetricType.funnel.value:
def transform(cls, values):
values["isTemplate"] = values.get("metricType") in [MetricType.errors, MetricType.performance,
MetricType.resources, MetricType.web_vital]
if values.get("metricType") == MetricType.timeseries \
or values.get("metricType") == MetricType.table \
and values.get("metricOf") != MetricOfTable.issues:
values["metricValue"] = []
if values.get("metricType") == MetricType.funnel and \
values.get("series") is not None and len(values["series"]) > 1:
values["series"] = [values["series"][0]]
elif values.get("metricType") not in [MetricType.table,
MetricType.timeseries,
MetricType.insights,
MetricType.click_map] \
and values.get("series") is not None and len(values["series"]) > 0:
values["series"] = []
return values
@root_validator
def restrictions(cls, values):
assert values.get("metric_type") != MetricType.insights, f"metricType:{MetricType.insights} not supported yet"
return values
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
@root_validator
def validator(cls, values):
if values.get("metric_type") == MetricType.timeseries:
assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \
f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries}"
assert isinstance(values.get("metric_of"), MetricOfTimeseries), \
f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.timeseries}"
elif values.get("metric_type") == MetricType.table:
assert isinstance(values.get("view_type"), MetricTableViewType), \
f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table}"
assert isinstance(values.get("metric_of"), MetricOfTable), \
f"metricOf must be of type {MetricOfTable} for metricType:{MetricType.table}"
if values.get("metric_of") in (MetricOfTable.sessions, MetricOfTable.errors):
assert values.get("view_type") == MetricTableViewType.table, \
f"viewType must be '{MetricTableViewType.table}' for metricOf:{values['metric_of']}"
if values.get("metric_of") != MetricOfTable.issues:
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{MetricOfTable.issues}"
elif values.get("metric_type") == MetricType.funnel:
assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}"
# ignore this for now, let the UI send whatever it wants for metric_of
# assert isinstance(values.get("metric_of"), MetricOfTimeseries), \
# f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.funnel}"
else:
if values.get("metric_type") == MetricType.errors:
assert isinstance(values.get("metric_of"), MetricOfErrors), \
f"metricOf must be of type {MetricOfErrors} for metricType:{MetricType.errors}"
elif values.get("metric_type") == MetricType.performance:
assert isinstance(values.get("metric_of"), MetricOfPerformance), \
f"metricOf must be of type {MetricOfPerformance} for metricType:{MetricType.performance}"
elif values.get("metric_type") == MetricType.resources:
assert isinstance(values.get("metric_of"), MetricOfResources), \
f"metricOf must be of type {MetricOfResources} for metricType:{MetricType.resources}"
elif values.get("metric_type") == MetricType.web_vital:
assert isinstance(values.get("metric_of"), MetricOfWebVitals), \
f"metricOf must be of type {MetricOfWebVitals} for metricType:{MetricType.web_vital}"
elif values.get("metric_type") == MetricType.click_map:
assert isinstance(values.get("metric_of"), MetricOfClickMap), \
f"metricOf must be of type {MetricOfClickMap} for metricType:{MetricType.click_map}"
# Allow only LOCATION events for clickMap
for s in values.get("series", []):
for f in s.filter.events:
assert f.type == EventType.location, f"only events of type:{EventType.location} are allowed for metricOf:{MetricType.click_map}"
assert isinstance(values.get("view_type"), MetricOtherViewType), \
f"viewType must be 'chart|list' for metricOf:{values.get('metric_of')}"
return values
class Config:
alias_generator = attribute_to_camel_case
class CardUpdateSeriesSchema(CardCreateSeriesSchema):
series_id: Optional[int] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
class UpdateCardSchema(CreateCardSchema):
series: List[CardUpdateSeriesSchema] = Field(...)
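For reference, a create-card body against CreateCardSchema could look like this sketch; keys are camelCase per attribute_to_camel_case, while the enum literals ("lineChart", "sessionCount") are assumptions about the wire format, not taken from this diff:
card_payload = {
    "name": "JS errors over time",
    "metricType": "timeseries",
    "viewType": "lineChart",  # assumed MetricTimeseriesViewType literal
    "metricOf": "sessionCount",  # assumed MetricOfTimeseries literal
    "metricValue": [],
    "config": {"col": 2, "row": 2, "position": 0},  # becomes default_config via the "config" alias
    "series": [{"name": "all sessions", "filter": {"events": [], "filters": []}}],
}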
class UpdateCustomMetricsStatusSchema(BaseModel):
@ -941,55 +1132,6 @@ class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema):
alias_generator = attribute_to_camel_case
# these values should match the keys in metrics table
class TemplatePredefinedKeys(str, Enum):
count_sessions = "count_sessions"
avg_request_load_time = "avg_request_load_time"
avg_page_load_time = "avg_page_load_time"
avg_image_load_time = "avg_image_load_time"
avg_dom_content_load_start = "avg_dom_content_load_start"
avg_first_contentful_pixel = "avg_first_contentful_pixel"
avg_visited_pages = "avg_visited_pages"
avg_session_duration = "avg_session_duration"
avg_pages_dom_buildtime = "avg_pages_dom_buildtime"
avg_pages_response_time = "avg_pages_response_time"
avg_response_time = "avg_response_time"
avg_first_paint = "avg_first_paint"
avg_dom_content_loaded = "avg_dom_content_loaded"
avg_till_first_bit = "avg_till_first_byte"
avg_time_to_interactive = "avg_time_to_interactive"
count_requests = "count_requests"
avg_time_to_render = "avg_time_to_render"
avg_used_js_heap_size = "avg_used_js_heap_size"
avg_cpu = "avg_cpu"
avg_fps = "avg_fps"
impacted_sessions_by_js_errors = "impacted_sessions_by_js_errors"
domains_errors_4xx = "domains_errors_4xx"
domains_errors_5xx = "domains_errors_5xx"
errors_per_domains = "errors_per_domains"
calls_errors = "calls_errors"
errors_by_type = "errors_per_type"
errors_by_origin = "resources_by_party"
speed_index_by_location = "speed_location"
slowest_domains = "slowest_domains"
sessions_per_browser = "sessions_per_browser"
time_to_render = "time_to_render"
impacted_sessions_by_slow_pages = "impacted_sessions_by_slow_pages"
memory_consumption = "memory_consumption"
cpu_load = "cpu"
frame_rate = "fps"
crashes = "crashes"
resources_vs_visually_complete = "resources_vs_visually_complete"
pages_dom_buildtime = "pages_dom_buildtime"
pages_response_time = "pages_response_time"
pages_response_time_distribution = "pages_response_time_distribution"
missing_resources = "missing_resources"
slowest_resources = "slowest_resources"
resources_fetch_time = "resources_loading_time"
resource_type_vs_response_end = "resource_type_vs_response_end"
resources_count_by_type = "resources_count_by_type"
class TemplatePredefinedUnits(str, Enum):
millisecond = "ms"
second = "s"
@ -1000,24 +1142,15 @@ class TemplatePredefinedUnits(str, Enum):
count = "count"
class CustomMetricAndTemplate(BaseModel):
is_template: bool = Field(...)
project_id: Optional[int] = Field(...)
predefined_key: Optional[TemplatePredefinedKeys] = Field(...)
class Config:
alias_generator = attribute_to_camel_case
class LiveFilterType(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
user_device = FilterType.user_device.value
user_country = FilterType.user_country.value
user_id = FilterType.user_id.value
user_anonymous_id = FilterType.user_anonymous_id.value
rev_id = FilterType.rev_id.value
platform = FilterType.platform.value
user_os = FilterType.user_os
user_browser = FilterType.user_browser
user_device = FilterType.user_device
user_country = FilterType.user_country
user_id = FilterType.user_id
user_anonymous_id = FilterType.user_anonymous_id
rev_id = FilterType.rev_id
platform = FilterType.platform
page_title = "PAGETITLE"
session_id = "SESSIONID"
metadata = "METADATA"
@ -1030,13 +1163,13 @@ class LiveFilterType(str, Enum):
class LiveSessionSearchFilterSchema(BaseModel):
value: Union[List[str], str] = Field(...)
type: LiveFilterType = Field(...)
source: Optional[str] = Field(None)
operator: Literal[SearchEventOperator._is.value,
SearchEventOperator._contains.value] = Field(SearchEventOperator._contains.value)
source: Optional[str] = Field(default=None)
operator: Literal[SearchEventOperator._is, \
SearchEventOperator._contains] = Field(default=SearchEventOperator._contains)
@root_validator
def validator(cls, values):
if values.get("type") is not None and values["type"] == LiveFilterType.metadata.value:
if values.get("type") is not None and values["type"] == LiveFilterType.metadata:
assert values.get("source") is not None, "source should not be null for METADATA type"
assert len(values.get("source")) > 0, "source should not be empty for METADATA type"
return values
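Per the validator above, a METADATA live filter must carry its metadata key in source; a minimal sketch (the key name "plan" is illustrative):
# Passes: metadata filters name their key in `source`.
f = LiveSessionSearchFilterSchema(value=["premium"], type=LiveFilterType.metadata, source="plan")
# A METADATA filter with a null or empty source fails the assertions above.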
@ -1059,8 +1192,8 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
else:
i += 1
for i in values["filters"]:
if i.get("type") == LiveFilterType.platform.value:
i["type"] = LiveFilterType.user_device_type.value
if i.get("type") == LiveFilterType.platform:
i["type"] = LiveFilterType.user_device_type
if values.get("sort") is not None:
if values["sort"].lower() == "startts":
values["sort"] = "TIMESTAMP"
@ -1121,3 +1254,78 @@ class SessionUpdateNoteSchema(SessionNoteSchema):
break
assert c > 0, "at least 1 value should be provided for update"
return values
class WebhookType(str, Enum):
webhook = "webhook"
slack = "slack"
email = "email"
msteams = "msteams"
class SearchCardsSchema(_PaginatedSchema):
order: SortOrderType = Field(default=SortOrderType.desc)
shared_only: bool = Field(default=False)
mine_only: bool = Field(default=False)
query: Optional[str] = Field(default=None)
class Config:
alias_generator = attribute_to_camel_case
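A usage sketch for the card-search payload, assuming page and limit are inherited from _PaginatedSchema:
# camelCase aliases apply, so mine_only travels as "mineOnly".
data = SearchCardsSchema.parse_obj({"mineOnly": True, "query": "errors", "page": 1, "limit": 10})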
class _ClickMapSearchEventRaw(_SessionSearchEventRaw):
type: Literal[EventType.location] = Field(...)
class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema):
events: Optional[List[_ClickMapSearchEventRaw]] = Field([])
filters: List[Union[SessionSearchFilterSchema, _ClickMapSearchEventRaw]] = Field([])
@root_validator(pre=True)
def transform(cls, values):
for f in values.get("filters", []):
if f.get("type") == FilterType.duration:
return values
values["filters"] = values.get("filters", [])
values["filters"].append({"value": [5000], "type": FilterType.duration,
"operator": SearchEventOperator._is, "filters": []})
return values
@root_validator()
def flat_to_original(cls, values):
n_filters = []
n_events = []
for v in values.get("filters", []):
if isinstance(v, _ClickMapSearchEventRaw):
n_events.append(v)
else:
n_filters.append(v)
values["events"] = n_events
values["filters"] = n_filters
return values
class IssueAdvancedFilter(BaseModel):
type: IssueFilterType = Field(default=IssueFilterType._selector)
value: List[str] = Field(default=[])
operator: SearchEventOperator = Field(default=SearchEventOperator._is)
class ClickMapFilterSchema(BaseModel):
value: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[])
type: Literal[FilterType.issue] = Field(...)
operator: Literal[SearchEventOperator._is, MathOperator._equal] = Field(...)
# source: Optional[Union[ErrorSource, str]] = Field(default=None)
filters: List[IssueAdvancedFilter] = Field(default=[])
class GetHeatmapPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-30))
endDate: int = Field(TimeUTC.now())
url: str = Field(...)
# issues: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[])
filters: List[ClickMapFilterSchema] = Field(default=[])
click_rage: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
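An illustrative request body for the heatmap endpoint (ms timestamps; the "issue"/"click_rage"/"is" literals are assumed wire values for the enums referenced above):
heatmap_payload = {
    "startDate": 1671494400000,
    "endDate": 1674086400000,
    "url": "https://app.example.com/checkout",  # illustrative
    "filters": [{"type": "issue", "value": ["click_rage"], "operator": "is", "filters": []}],
    "clickRage": True,  # camelCase alias of click_rage
}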

10
ee/api/.gitignore vendored
View file

@ -176,6 +176,7 @@ chalicelib/.config
chalicelib/saas
README/*
Pipfile
Pipfile.lock
.local/*
@ -183,8 +184,12 @@ Pipfile
#exp /chalicelib/core/alerts_processor.py
/chalicelib/core/announcements.py
/chalicelib/core/autocomplete.py
/chalicelib/core/click_maps.py
/chalicelib/core/collaboration_base.py
/chalicelib/core/collaboration_msteams.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/countries.py
/chalicelib/core/dashboards.py
#exp /chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
#exp /chalicelib/core/events.py
@ -211,10 +216,9 @@ Pipfile
/chalicelib/core/metadata.py
/chalicelib/core/mobile.py
/chalicelib/core/sessions_assignments.py
/chalicelib/core/sessions_metas.py
#/chalicelib/core/sessions_metas.py
/chalicelib/core/sessions_mobs.py
#exp /chalicelib/core/significance.py
/chalicelib/core/slack.py
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
@ -226,6 +230,7 @@ Pipfile
/chalicelib/utils/dev.py
/chalicelib/utils/email_handler.py
/chalicelib/utils/email_helper.py
/chalicelib/utils/errors_helper.py
/chalicelib/utils/event_filter_definition.py
/chalicelib/utils/github_client_v3.py
/chalicelib/utils/helper.py
@ -234,6 +239,7 @@ Pipfile
/chalicelib/utils/pg_client.py
/chalicelib/utils/s3.py
/chalicelib/utils/smtp.py
/chalicelib/utils/sql_helper.py
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
/routers/app/__init__.py

View file

@ -1,4 +1,4 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec tini

View file

@ -1,4 +1,4 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base tini

View file

@ -1,4 +1,4 @@
FROM python:3.10-alpine
FROM python:3.11-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base tini

View file

@ -17,7 +17,7 @@ from routers import core, core_dynamic, ee, saml
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.crons import ee_crons
from routers.subs import dashboard, insights, metrics, v1_api_ee
from routers.subs import insights, metrics, v1_api_ee
from routers.subs import v1_api
app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
@ -64,7 +64,6 @@ app.include_router(ee.app_apikey)
app.include_router(saml.public_app)
app.include_router(saml.app)
app.include_router(saml.app_apikey)
app.include_router(dashboard.app)
app.include_router(metrics.app)
app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
@ -109,4 +108,3 @@ async def stop_server():
await shutdown()
import os, signal
os.kill(1, signal.SIGTERM)

View file

@ -0,0 +1,138 @@
import hashlib
from decouple import config
import schemas
import schemas_ee
from chalicelib.utils import s3, pg_client, helper, s3_extra
from chalicelib.utils.TimeUTC import TimeUTC
def generate_file_key(project_id, key):
return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}"
def presign_record(project_id, data: schemas_ee.AssistRecordPayloadSchema, context: schemas_ee.CurrentContext):
key = generate_file_key(project_id=project_id, key=f"{TimeUTC.now()}-{data.name}")
presigned_url = s3.get_presigned_url_for_upload(bucket=config('ASSIST_RECORDS_BUCKET'), expires_in=1800, key=key)
return {"URL": presigned_url, "key": key}
def save_record(project_id, data: schemas_ee.AssistRecordSavePayloadSchema, context: schemas_ee.CurrentContext):
s3_extra.tag_record(file_key=data.key, tag_value=config('RETENTION_L_VALUE', default='vault'))
params = {"user_id": context.user_id, "project_id": project_id, **data.dict()}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""INSERT INTO assist_records(project_id, user_id, name, file_key, duration, session_id)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(key)s,%(duration)s, %(session_id)s)
RETURNING record_id, user_id, session_id, created_at, name, duration,
(SELECT name FROM users WHERE users.user_id = %(user_id)s LIMIT 1) AS created_by, file_key;""",
params)
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
result["URL"] = s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("ASSIST_RECORDS_BUCKET"), 'Key': result.pop("fileKey")},
ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900)
)
return result
def search_records(project_id, data: schemas_ee.AssistRecordSearchPayloadSchema, context: schemas_ee.CurrentContext):
conditions = ["projects.tenant_id=%(tenant_id)s",
"projects.deleted_at ISNULL",
"assist_records.created_at>=%(startDate)s",
"assist_records.created_at<=%(endDate)s",
"assist_records.deleted_at ISNULL"]
params = {"tenant_id": context.tenant_id, "project_id": project_id,
"startDate": data.startDate, "endDate": data.endDate,
"p_start": (data.page - 1) * data.limit, "p_limit": data.limit,
**data.dict()}
if data.user_id is not None:
conditions.append("assist_records.user_id=%(user_id)s")
if data.query is not None and len(data.query) > 0:
conditions.append("(users.name ILIKE %(query)s OR assist_records.name ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT record_id, user_id, session_id, assist_records.created_at,
assist_records.name, duration, users.name AS created_by
FROM assist_records
INNER JOIN projects USING (project_id)
LEFT JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
ORDER BY assist_records.created_at {data.order}
LIMIT %(p_limit)s OFFSET %(p_start)s;""",
params)
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
def get_record(project_id, record_id, context: schemas_ee.CurrentContext):
conditions = ["projects.tenant_id=%(tenant_id)s",
"projects.deleted_at ISNULL",
"assist_records.record_id=%(record_id)s",
"assist_records.deleted_at ISNULL"]
params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT record_id, user_id, session_id, assist_records.created_at,
assist_records.name, duration, users.name AS created_by,
file_key
FROM assist_records
INNER JOIN projects USING (project_id)
LEFT JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
LIMIT 1;""", params)
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
if result:
result["URL"] = s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("ASSIST_RECORDS_BUCKET"), 'Key': result.pop("fileKey")},
ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900)
)
return result
def update_record(project_id, record_id, data: schemas_ee.AssistRecordUpdatePayloadSchema,
context: schemas_ee.CurrentContext):
conditions = ["assist_records.record_id=%(record_id)s", "assist_records.deleted_at ISNULL"]
params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id, "name": data.name}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""UPDATE assist_records
SET name= %(name)s
FROM (SELECT users.name AS created_by
FROM assist_records INNER JOIN users USING (user_id)
WHERE record_id = %(record_id)s
AND assist_records.deleted_at ISNULL
LIMIT 1) AS users
WHERE {" AND ".join(conditions)}
RETURNING record_id, user_id, session_id, assist_records.created_at,
assist_records.name, duration, created_by, file_key;""", params)
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
if not result:
return {"errors": ["record not found"]}
result["URL"] = s3.client.generate_presigned_url(
'get_object',
Params={'Bucket': config("ASSIST_RECORDS_BUCKET"), 'Key': result.pop("fileKey")},
ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900)
)
return result
def delete_record(project_id, record_id, context: schemas_ee.CurrentContext):
conditions = ["assist_records.record_id=%(record_id)s"]
params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""UPDATE assist_records
SET deleted_at= (now() at time zone 'utc')
WHERE {" AND ".join(conditions)}
RETURNING file_key;""", params)
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
if not result:
return {"errors": ["record not found"]}
s3_extra.tag_record(file_key=result["fileKey"], tag_value=config('RETENTION_D_VALUE', default='default'))
return {"state": "success"}

View file

@ -38,13 +38,16 @@ def jwt_context(context):
}
def get_jwt_exp(iat):
return iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000
def generate_jwt(id, tenant_id, iat, aud, exp=None):
token = jwt.encode(
payload={
"userId": id,
"tenantId": tenant_id,
"exp": exp + TimeUTC.get_utc_offset() // 1000 if exp is not None \
else iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
"exp": exp + TimeUTC.get_utc_offset() // 1000 if exp is not None else get_jwt_exp(iat),
"iss": config("JWT_ISSUER"),
"iat": iat // 1000,
"aud": aud

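For symmetry, a decode-side sketch for tokens built by generate_jwt (PyJWT; the secret and algorithm settings are assumed names, since the hunk above ends before jwt.encode's key arguments):
decoded = jwt.decode(
    token,
    config("jwt_secret"),  # assumed setting name
    algorithms=[config("jwt_algorithm", default="HS512")],  # assumed
    audience=aud,
    issuer=config("JWT_ISSUER"),
)  # raises on failed exp/iss/aud checks, mirroring the claims set above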
View file

@ -1,9 +1,10 @@
import schemas
from chalicelib.core import countries, events, metadata
from chalicelib.utils import ch_client
from chalicelib.utils import helper
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.event_filter_definition import Event
TABLE = "final.autocomplete"
TABLE = "experimental.autocomplete"
def __get_autocomplete_table(value, project_id):
@ -19,13 +20,23 @@ def __get_autocomplete_table(value, project_id):
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
c_list = []
for e in autocomplete_events:
if e == schemas.FilterType.user_country:
c_list = countries.get_country_code_autocomplete(value)
if len(c_list) > 0:
sub_queries.append(f"""(SELECT DISTINCT ON(value) type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value IN %(c_list)s)""")
continue
sub_queries.append(f"""(SELECT type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
ORDER BY value
ORDER BY value
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
@ -33,12 +44,14 @@ def __get_autocomplete_table(value, project_id):
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
ORDER BY value
ORDER BY value
LIMIT 5)""")
with ch_client.ClickHouseClient() as cur:
query = " UNION DISTINCT ".join(sub_queries) + ";"
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
params = {"project_id": project_id,
"value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"c_list": tuple(c_list)}
results = []
try:
results = cur.execute(query=query, params=params)
@ -55,12 +68,21 @@ def __get_autocomplete_table(value, project_id):
def __generic_query(typename, value_length=None):
if typename == schemas.FilterType.user_country:
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename.upper()}'
AND value IN %(value)s
ORDER BY value"""
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
@ -69,7 +91,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
@ -77,7 +99,7 @@ def __generic_query(typename, value_length=None):
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND type='{typename.upper()}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
@ -98,10 +120,137 @@ def __generic_autocomplete(event: Event):
def __generic_autocomplete_metas(typename):
def f(project_id, text):
with ch_client.ClickHouseClient() as cur:
params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
          "svalue": helper.string_to_sql_like("^" + text)}
if typename == schemas.FilterType.user_country:
    params["value"] = tuple(countries.get_country_code_autocomplete(text))
    if len(params["value"]) == 0:
        return []
query = __generic_query(typename, value_length=len(text))
rows = cur.execute(query=query, params=params)
return rows
return f
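Usage sketch of the factory above; for user_country the text resolves to ISO codes before querying:
autocomplete_country = __generic_autocomplete_metas(schemas.FilterType.user_country)
rows = autocomplete_country(project_id=1, text="ger")  # returns [] early if no country code matches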
def __pg_errors_query(source=None, value_length=None):
MAIN_TABLE = exp_ch_helper.get_main_js_errors_sessions_table()
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND message ILIKE %(svalue)s
AND event_type = 'ERROR'
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND name ILIKE %(svalue)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND message ILIKE %(value)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND name ILIKE %(value)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND message ILIKE %(svalue)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
AND name ILIKE %(svalue)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
def __search_errors(project_id, value, key=None, source=None):
with ch_client.ClickHouseClient() as cur:
query = cur.format(__pg_errors_query(source, value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source})
results = cur.execute(query)
return helper.list_to_camel_case(results)
def __search_errors_ios(project_id, value, key=None, source=None):
# TODO: define this when ios events are supported in CH
return []
def __search_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
return []
sub_from = []
if key is not None:
meta_keys = {key: meta_keys[key]}
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
if len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM {exp_ch_helper.get_main_sessions_table()}
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM {exp_ch_helper.get_main_sessions_table()}
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM {exp_ch_helper.get_main_sessions_table()}
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with ch_client.ClickHouseClient() as cur:
query = cur.format(f"""SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
results = cur.execute(query)
return helper.list_to_camel_case(results)
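Usage sketch (the metadata key is illustrative):
rows = __search_metadata(project_id=1, value="beta", key="plan")
# rows follow the SELECT above: [{"key": "plan", "value": "...", "type": "METADATA"}, ...]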

View file

@ -1,13 +1,16 @@
import json
from typing import Union
from fastapi import HTTPException
from starlette import status
from decouple import config
import schemas
from chalicelib.core import funnels, issues
import schemas_ee
from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
print(">>> Using experimental error search")
from . import errors_exp as errors
@ -22,7 +25,7 @@ else:
PIE_CHART_GROUP = 5
def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __try_live(project_id, data: schemas_ee.CreateCardSchema):
results = []
for i, s in enumerate(data.series):
s.filter.startDate = data.startTimestamp
@ -55,11 +58,11 @@ def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return results
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
def __is_funnel_chart(data: schemas_ee.CreateCardSchema):
return data.metric_type == schemas.MetricType.funnel
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
def __get_funnel_chart(project_id, data: schemas_ee.CreateCardSchema):
if len(data.series) == 0:
return {
"stages": [],
@ -70,12 +73,12 @@ def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
def __is_errors_list(data):
def __is_errors_list(data: schemas_ee.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.MetricOfTable.errors
def __get_errors_list(project_id, user_id, data):
def __get_errors_list(project_id, user_id, data: schemas_ee.CreateCardSchema):
if len(data.series) == 0:
return {
"total": 0,
@ -88,12 +91,12 @@ def __get_errors_list(project_id, user_id, data):
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_sessions_list(data):
def __is_sessions_list(data: schemas_ee.CreateCardSchema):
return data.metric_type == schemas.MetricType.table \
and data.metric_of == schemas.MetricOfTable.sessions
def __get_sessions_list(project_id, user_id, data):
def __get_sessions_list(project_id, user_id, data: schemas_ee.CreateCardSchema):
if len(data.series) == 0:
print("empty series")
return {
@ -107,14 +110,53 @@ def __get_sessions_list(project_id, user_id, data):
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def __is_predefined(data: schemas_ee.CreateCardSchema):
return data.is_template
def __is_click_map(data: schemas_ee.CreateCardSchema):
return data.metric_type == schemas.MetricType.click_map
def __get_click_map_chart(project_id, user_id, data: schemas_ee.CreateCardSchema):
if len(data.series) == 0:
return None
data.series[0].filter.startDate = data.startTimestamp
data.series[0].filter.endDate = data.endTimestamp
return click_maps.search_short_session(project_id=project_id, user_id=user_id,
data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()))
# EE only
def __is_insights(data: schemas_ee.CreateCardSchema):
return data.metric_type == schemas.MetricType.insights
# EE only
def __get_insights_chart(project_id, user_id, data: schemas_ee.CreateCardSchema):
return sessions_insights.fetch_selected(project_id=project_id,
data=schemas_ee.GetInsightsSchema(startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
metricValue=data.metric_value,
series=data.series))
def merged_live(project_id, data: schemas_ee.CreateCardSchema, user_id=None):
if data.is_template:
return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict())
elif __is_funnel_chart(data):
return __get_funnel_chart(project_id=project_id, data=data)
elif __is_errors_list(data):
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
elif __is_sessions_list(data):
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
elif __is_click_map(data):
return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
# EE only
elif __is_insights(data):
return __get_insights_chart(project_id=project_id, user_id=user_id, data=data)
elif len(data.series) == 0:
return []
series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
@ -126,12 +168,12 @@ def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id
return results
def __merge_metric_with_data(metric: schemas_ee.CreateCardSchema,
                             data: schemas.CardChartSchema) -> schemas_ee.CreateCardSchema:
    if data.series is not None and len(data.series) > 0:
        metric.series = data.series
    metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(
        **{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
if len(data.filters) > 0 or len(data.events) > 0:
for s in metric.series:
if len(data.filters) > 0:
@ -141,35 +183,23 @@ def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloa
return metric
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema,
metric: schemas_ee.CreateCardSchema = None):
if metric is None:
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas_ee.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
return merged_live(project_id=project_id, data=metric, user_id=user_id)
# if __is_funnel_chart(metric):
# return __get_funnel_chart(project_id=project_id, data=metric)
# elif __is_errors_list(metric):
# return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
#
# series_charts = __try_live(project_id=project_id, data=metric)
# if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
# return series_charts
# results = [{}] * len(series_charts[0])
# for i in range(len(results)):
# for j, series_chart in enumerate(series_charts):
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
# metric.series[j].name: series_chart[i]["count"]}
# return results
def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if raw_metric is None:
        return None
    metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(**raw_metric)
    metric: schemas_ee.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
results = []
@ -184,11 +214,12 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
return results
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if raw_metric is None:
        return None
    metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(**raw_metric)
    metric: schemas_ee.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -200,11 +231,12 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetric
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
    if raw_metric is None:
        return None
    metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(**raw_metric)
    metric: schemas_ee.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -216,7 +248,7 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSe
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
results = []
if data.series is None:
return results
@ -231,7 +263,7 @@ def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadS
return results
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
def create(project_id, user_id, data: schemas_ee.CreateCardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
_data = {}
for i, s in enumerate(data.series):
@ -240,35 +272,35 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboa
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
data.series = None
params = {"user_id": user_id, "project_id": project_id,
"default_config": json.dumps(data.config.dict()),
**data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;""", params)
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
params["default_config"] = json.dumps(data.default_config.dict())
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
%(metric_format)s, %(default_config)s, %(thumbnail)s)
RETURNING metric_id"""
if len(data.series) > 0:
query = f"""WITH m AS ({query})
INSERT INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;"""
cur.execute(
query
)
query = cur.mogrify(query, params)
# print("-------")
# print(query)
# print("-------")
cur.execute(query)
r = cur.fetchone()
if dashboard:
return r["metric_id"]
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema):
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
series_ids = [r["seriesId"] for r in metric["series"]]
@ -280,7 +312,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of,
"metric_value": data.metric_value, "metric_format": data.metric_format,
"config": json.dumps(data.config.dict())}
"config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail}
for i, s in enumerate(data.series):
prefix = "u_"
if s.index is None:
@ -331,16 +363,42 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now()),
default_config = %(config)s
default_config = %(config)s,
thumbnail = %(thumbnail)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(query)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_all(project_id, user_id, include_series=False):
# def __presign_thumbnail(card):
# if card["thumbnail_url"]:
# card["thumbnail_url"] = s3.client.generate_presigned_url(
# 'get_object',
# Params={'Bucket': config('THUMBNAILS_BUCKET'), 'Key': card["thumbnail_url"]},
# ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900)
# )
def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
constraints = ["metrics.project_id = %(project_id)s",
"metrics.deleted_at ISNULL"]
params = {"project_id": project_id, "user_id": user_id,
"offset": (data.page - 1) * data.limit,
"limit": data.limit, }
if data.mine_only:
constraints.append("user_id = %(user_id)s")
else:
constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
if data.shared_only:
constraints.append("is_public")
if data.query is not None and len(data.query) > 0:
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
sub_join = ""
if include_series:
@ -349,45 +407,55 @@ def get_all(project_id, user_id, include_series=False):
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (user_id = %(user_id)s OR metrics.is_public)
ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
{"project_id": project_id, "user_id": user_id}
)
)
query = cur.mogrify(
f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
dashboards, owner_email, default_config AS config, thumbnail
FROM metrics
{sub_join}
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT DISTINCT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND dashboard_widgets.metric_id = metrics.metric_id
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE {" AND ".join(constraints)}
ORDER BY created_at {data.order}
LIMIT %(limit)s OFFSET %(offset)s;""", params)
cur.execute(query)
rows = cur.fetchall()
if include_series:
for r in rows:
# r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
# __presign_thumbnail(r)
for s in r["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
else:
for r in rows:
# __presign_thumbnail(r)
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
rows = helper.list_to_camel_case(rows)
return rows
def get_all(project_id, user_id):
default_search = schemas.SearchCardsSchema()
result = rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
while len(rows) == default_search.limit:
default_search.page += 1
rows = search_all(project_id=project_id, user_id=user_id, data=default_search)
result += rows
return result
def delete(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
@ -403,37 +471,37 @@ def delete(project_id, metric_id, user_id):
return {"state": "success"}
def get(metric_id, project_id, user_id, flatten=True):
def get_card(metric_id, project_id, user_id, flatten=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
query = cur.mogrify(
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
FROM (SELECT dashboard_id, name, is_public
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
WHERE deleted_at ISNULL
AND project_id = %(project_id)s
AND ((dashboards.user_id = %(user_id)s OR is_public))
AND metric_id = %(metric_id)s) AS connected_dashboards
) AS connected_dashboards ON (TRUE)
LEFT JOIN LATERAL (SELECT email AS owner_email
FROM users
WHERE deleted_at ISNULL
AND users.user_id = metrics.user_id
) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
if row is None:
return None
@ -456,9 +524,8 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
AND project_id = %(project_id)s
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
) AS connected_dashboards ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *, default_config AS config
query = cur.mogrify(
f"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
@ -471,9 +538,9 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
cur.execute(query)
row = cur.fetchone()
return helper.dict_to_camel_case(row)
@ -512,17 +579,17 @@ def change_state(project_id, metric_id, user_id, status):
AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "status": status, "user_id": user_id})
)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CustomMetricSessionsPayloadSchema
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
metric: schemas_ee.CreateCardSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
for s in metric.series:
@ -551,3 +618,91 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}
def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
raw_metric = get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
include_dashboard=False)
if raw_metric is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(**raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
else:
return make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data, metric=metric)
PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
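Dispatch sketch, assuming the chart payload dict supplies the kwargs the metrics helpers expect (startTimestamp, endTimestamp, density, ...):
chart = get_predefined_metric(key=schemas.MetricOfWebVitals.avg_cpu, project_id=1,
                              data={"startTimestamp": TimeUTC.now(delta_days=-7),
                                    "endTimestamp": TimeUTC.now()})
# an unknown key falls through to the default lambda and yields None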
# def add_thumbnail(metric_id, user_id, project_id):
# key = generate_file_key(project_id=project_id, key=f"{metric_id}.png")
# params = {"metric_id": metric_id, "user_id": user_id, "project_id": project_id, "key": key}
# with pg_client.PostgresClient() as cur:
# query = cur.mogrify(f"""\
# UPDATE metrics
# SET thumbnail_url = %(key)s
# WHERE metric_id = %(metric_id)s
# AND project_id = %(project_id)s
# AND (user_id = %(user_id)s OR is_public)
# RETURNING metric_id;""", params)
# cur.execute(query)
# row = cur.fetchone()
# if row is None:
# return {"errors": ["Card not found"]}
# return {"data": s3.get_presigned_url_for_upload(bucket=config('THUMBNAILS_BUCKET'), expires_in=180, key=key,
# # content-length-range is in bytes
# conditions=["content-length-range", 1, 1 * 1024 * 1024],
# content_type="image/png")}
#
#
# def generate_file_key(project_id, key):
# return f"{project_id}/cards/{key}"

View file

@ -1,335 +0,0 @@
import json
import schemas
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
if config("EXP_METRICS", cast=bool, default=False):
from . import metrics_exp as metrics
else:
from . import metrics as metrics
# category name should be lower cased
CATEGORY_DESCRIPTION = {
'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
'custom': 'Previously created custom metrics by me and my team.',
'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
'performance': 'Optimize your app\'s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
'resources': 'Find out which resources are missing and those that may be slowing your web app.'
}
def get_templates(project_id, user_id):
with pg_client.PostgresClient() as cur:
pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
FROM (SELECT * , default_config AS config
FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE deleted_at IS NULL
AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
) AS metrics
GROUP BY category
ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
{"project_id": project_id, "userId": user_id})
cur.execute(pg_query)
rows = cur.fetchall()
for r in rows:
r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
for w in r["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
for s in w["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.list_to_camel_case(rows)
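The ORDER BY ARRAY_POSITION(...) trick pins the category groups to a fixed order; a category absent from the array (such as 'web vitals' here) yields NULL and therefore sorts last:

SELECT ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], 'errors');
-- -> 3, so the 'errors' group sorts third; an unlisted category returns NULL and sorts after the listed ones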
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
with pg_client.PostgresClient() as cur:
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
RETURNING *"""
params = {"userId": user_id, "projectId": project_id, **data.dict()}
if data.metrics is not None and len(data.metrics) > 0:
pg_query = f"""WITH dash AS ({pg_query})
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
VALUES {",".join([f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])}
RETURNING (SELECT dashboard_id FROM dash)"""
for i, m in enumerate(data.metrics):
params[f"metric_id_{i}"] = m
# params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
# .get("properties", {}).get("config", {}).get("default", {})
# params[f"config_{i}"]["position"] = i
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i})
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is None:
return {"errors": ["something went wrong while creating the dashboard"]}
return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
def get_dashboards(project_id, user_id):
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT *
FROM dashboards
WHERE deleted_at ISNULL
AND project_id = %(projectId)s
AND (user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
def get_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
FROM dashboards
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
FROM metrics
INNER JOIN dashboard_widgets USING (metric_id)
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
AND metrics.deleted_at ISNULL
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
) AS all_metric_widgets ON (TRUE)
WHERE dashboards.deleted_at ISNULL
AND dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
for w in row["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
w["config"]["col"] = w["default_config"]["col"]
w["config"]["row"] = w["default_config"]["row"]
for s in w["series"]:
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
return helper.dict_to_camel_case(row)
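The LEFT JOIN LATERAL plus COALESCE(JSONB_AGG(...), '[]') pairing is what keeps widget-less dashboards in the result set; stripped to its core:

SELECT d.dashboard_id, w.widgets
FROM dashboards AS d
LEFT JOIN LATERAL (
    SELECT COALESCE(JSONB_AGG(dw.*), '[]'::jsonb) AS widgets
    FROM dashboard_widgets AS dw
    WHERE dw.dashboard_id = d.dashboard_id
) AS w ON (TRUE);
-- a dashboard with no widgets comes back with widgets = '[]' rather than being dropped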
def delete_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
pg_query = """UPDATE dashboards
SET deleted_at = timezone('utc'::text, now())
WHERE dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}}
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
with pg_client.PostgresClient() as cur:
pg_query = """SELECT COALESCE(COUNT(*),0) AS count
FROM dashboard_widgets
WHERE dashboard_id = %(dashboard_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
offset = row["count"]
pg_query = f"""UPDATE dashboards
SET name = %(name)s,
description= %(description)s
{", is_public = %(is_public)s" if data.is_public is not None else ""}
{", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
WHERE dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public)"""
if data.metrics is not None and len(data.metrics) > 0:
pg_query = f"""WITH dash AS ({pg_query})
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};"""
for i, m in enumerate(data.metrics):
params[f"metric_id_{i}"] = m
# params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
# .get("properties", {}).get("config", {}).get("default", {})
# params[f"config_{i}"]["position"] = i
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
params[f"config_{i}"] = json.dumps({"position": i + offset})
cur.execute(cur.mogrify(pg_query, params))
return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
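update_dashboard appends new widgets after the existing ones by offsetting each stored position with the current widget count; sketched (counts hypothetical):

offset = 3  # COUNT(*) of widgets already on the dashboard
new_metrics = [10, 11]
configs = [json.dumps({"position": i + offset}) for i, _ in enumerate(new_metrics)]
# -> ['{"position": 3}', '{"position": 4}']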
def get_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur:
pg_query = """SELECT metrics.*, metric_series.series
FROM dashboard_widgets
INNER JOIN dashboards USING (dashboard_id)
INNER JOIN metrics USING (metric_id)
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE dashboard_id = %(dashboard_id)s
AND widget_id = %(widget_id)s
AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
AND dashboards.deleted_at IS NULL
AND metrics.deleted_at ISNULL
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
with pg_client.PostgresClient() as cur:
pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
%(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
WHERE EXISTS(SELECT 1 FROM dashboards
WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
AND dashboard_id = %(dashboard_id)s
AND (dashboards.user_id = %(userId)s OR is_public))
RETURNING *;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
with pg_client.PostgresClient() as cur:
pg_query = """UPDATE dashboard_widgets
SET config= %(config)s
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
RETURNING *;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
"widget_id": widget_id, **data.dict()}
params["config"] = json.dumps(data.config)
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def remove_widget(project_id, user_id, dashboard_id, widget_id):
with pg_client.PostgresClient() as cur:
pg_query = """DELETE FROM dashboard_widgets
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
cur.execute(cur.mogrify(pg_query, params))
return {"data": {"success": True}}
def pin_dashboard(project_id, user_id, dashboard_id):
with pg_client.PostgresClient() as cur:
pg_query = """UPDATE dashboards
SET is_pinned = FALSE
WHERE project_id=%(project_id)s;
UPDATE dashboards
SET is_pinned = True
WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
RETURNING *;"""
params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
}
def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
include_dashboard=False)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template and metric.predefined_key is None:
return None
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
metric=raw_metric)
def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
if raw_metric is None:
return None
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
if metric.is_template:
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
else:
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
data=data, metric=raw_metric)

View file

@ -1,13 +1,14 @@
import json
from decouple import config
import schemas
from chalicelib.core import sourcemaps
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from decouple import config
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
else:
@ -90,13 +91,14 @@ def __process_tags(row):
def get_details(project_id, error_id, user_id, **data):
pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
pg_sub_query24.append("error_id = %(error_id)s")
pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
endTime_arg_name="endDate30",project_key="sessions.project_id")
pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
startTime_arg_name="startDate30",
endTime_arg_name="endDate30", project_key="sessions.project_id")
pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
pg_sub_query30_session.append("error_id = %(error_id)s")
pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
endTime_arg_name="endDate30",project_key="errors.project_id")
endTime_arg_name="endDate30", project_key="errors.project_id")
pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")
@ -283,7 +285,7 @@ def get_details(project_id, error_id, user_id, **data):
status = cur.fetchone()
if status is not None:
row["stack"] = format_first_stack_frame(status).pop("stack")
row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
row["status"] = status.pop("status")
row["parent_error_id"] = status.pop("parent_error_id")
row["favorite"] = status.pop("favorite")
@ -727,19 +729,6 @@ def __status_rank(status):
}.get(status)
def format_first_stack_frame(error):
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
for s in error["stack"]:
for c in s.get("context", []):
for sci, sc in enumerate(c):
if isinstance(sc, str) and len(sc) > 1000:
c[sci] = sc[:1000]
# convert bytes to string:
if isinstance(s["filename"], bytes):
s["filename"] = s["filename"].decode("utf-8")
return error
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(

View file

@ -744,7 +744,7 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
else:
table_name = ""
if type_condition:
ch_sub_query.append(f"{table_name}event_type='ERROR'")
ch_sub_query.append(f"{table_name}EventType='ERROR'")
if time_constraint:
ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"]
@ -920,7 +920,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
params["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
# extra_from += f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"
if is_any:
referrer_constraint = 'isNotNull(s.base_referrer)'
else:
@ -1062,7 +1062,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM {MAIN_EVENTS_TABLE}
WHERE project_id=%(project_id)s
AND event_type='ERROR'
AND EventType='ERROR'
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart

View file

@ -1,21 +1,20 @@
from typing import Optional
from decouple import config
import schemas
from chalicelib.core import issues
from chalicelib.core import metadata
from chalicelib.core import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
from decouple import config
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
from . import autocomplete_exp as autocomplete
else:
from . import autocomplete as autocomplete
def get_customs_by_sessionId2_pg(session_id, project_id):
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
@ -45,7 +44,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):
for c in click_rage_issues:
merge_count = c.get("payload")
if merge_count is not None:
merge_count = merge_count.get("count", 3)
merge_count = merge_count.get("Count", 3)
else:
merge_count = 3
for i in range(len(rows)):
@ -58,246 +57,53 @@ def __get_grouped_clickrage(rows, session_id, project_id):
return rows
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = []
if event_type is None or event_type == schemas.EventType.click:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input:
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.location:
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows
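The refactored get_by_session_id takes an optional event_type so callers can fetch a single stream and skip the remaining queries; a hedged usage sketch (IDs hypothetical):

# every stream, with click-rage grouping (as the session replay endpoint does)
all_events = get_by_session_id(session_id=123, project_id=1, group_clickrage=True)

# LOCATION (page) events only; the clicks/inputs queries never run
pages = get_by_session_id(session_id=123, project_id=1,
                          event_type=schemas.EventType.location)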
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
def __search_pg_errors(project_id, value, key=None, source=None):
now = TimeUTC.now()
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source,
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
def __search_pg_metadata(project_id, value, key=None, source=None):
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
return []
sub_from = []
if key is not None:
meta_keys = {key: meta_keys[key]}
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
if len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
class event_type:
class EventType:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
@ -319,46 +125,46 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.LOCATION.ui_type)),
event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.REQUEST.ui_type)),
event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
query=autocomplete.__generic_query(
typename=event_type.GRAPHQL.ui_type)),
event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.STATEACTION.ui_type)),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
query=autocomplete.__generic_query(
typename=event_type.CLICK_IOS.ui_type)),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
query=autocomplete.__generic_query(
typename=event_type.INPUT_IOS.ui_type)),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
query=autocomplete.__generic_query(
typename=event_type.VIEW_IOS.ui_type)),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
typename=EventType.LOCATION.ui_type)),
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
query=autocomplete.__generic_query(typename=EventType.CUSTOM.ui_type)),
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
query=autocomplete.__generic_query(
typename=EventType.REQUEST.ui_type)),
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
query=autocomplete.__generic_query(
typename=EventType.GRAPHQL.ui_type)),
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.CUSTOM_IOS.ui_type)),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=event_type.REQUEST_IOS.ui_type)),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None),
typename=EventType.STATEACTION.ui_type)),
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
query=None),
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
query=None),
# IOS
EventType.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_IOS),
query=autocomplete.__generic_query(
typename=EventType.CLICK_IOS.ui_type)),
EventType.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_IOS),
query=autocomplete.__generic_query(
typename=EventType.INPUT_IOS.ui_type)),
EventType.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_IOS),
query=autocomplete.__generic_query(
typename=EventType.VIEW_IOS.ui_type)),
EventType.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_IOS),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM_IOS.ui_type)),
EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=EventType.REQUEST_IOS.ui_type)),
EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_errors_ios,
query=None),
}
@ -366,7 +172,7 @@ def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
@ -383,11 +189,9 @@ def search(text, event_type, project_id, source, key):
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \

View file

@ -4,8 +4,8 @@ from typing import List
import chalicelib.utils.helper
import schemas
from chalicelib.core import significance
from chalicelib.utils import dev
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
@ -46,7 +46,7 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
is_any = sh.isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
@ -163,7 +163,7 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
def get_possible_issue_types(project_id):
return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
return [{"type": t, "title": helper.get_issue_title(t)} for t in
['click_rage', 'dead_click', 'excessive_scrolling',
'bad_request', 'missing_resource', 'memory', 'cpu',
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
@ -260,7 +260,7 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(__parse_events(data.events))
data.events = __fix_stages(data.events)
if len(data.events) == 0:
@ -309,7 +309,7 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:

View file

@ -119,7 +119,7 @@ def get_role_by_name(tenant_id, name):
cur.execute(
cur.mogrify("""SELECT *
FROM public.roles
where tenant_id =%(tenant_id)s
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
AND name ILIKE %(name)s;""",
{"tenant_id": tenant_id, "name": name})

View file

@ -3,9 +3,10 @@ from typing import List
import schemas
import schemas_ee
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \
sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
sessions_devtool, sessions_notes
from chalicelib.utils import pg_client, helper, metrics_helper
from chalicelib.utils import pg_client, helper, metrics_helper, errors_helper
from chalicelib.utils import sql_helper as sh
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
@ -62,7 +63,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key,
encode(file_key,'hex') AS file_key
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
WHERE s.project_id = %(project_id)s
AND s.session_id = %(session_id)s;""",
@ -86,16 +87,16 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
@ -117,67 +118,6 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
return None
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def __reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def __get_sql_operator_multiple(op: schemas.SearchEventOperator):
return " IN " if op not in [schemas.SearchEventOperator._is_not, schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains] else " NOT IN "
def __get_sql_value_multiple(values):
if isinstance(values, tuple):
return values
return tuple(values) if isinstance(values, list) else (values,)
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
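These helpers move to chalicelib.utils.sql_helper (imported as sh) rather than disappearing; assuming sh.multi_conditions and sh.multi_values keep the semantics of the deleted bodies above, the expansion works like this:

cond = _multiple_conditions("s.user_id = %(value)s", ["alice", "bob"])
# -> "(s.user_id = %(value_0)s OR s.user_id = %(value_1)s)"
args = _multiple_values(["alice", "bob"])
# -> {"value_0": "alice", "value_1": "bob"}
# cur.mogrify(cond, args) then binds both values safely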
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
@ -307,13 +247,13 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.TableMetricOfType, metric_value: List):
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.TableMetricOfType.visited_url:
if metric_of == schemas.MetricOfTable.visited_url:
extra_event = "events.pages"
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
@ -356,18 +296,18 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
else:
sessions = cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table:
if isinstance(metric_of, schemas.TableMetricOfType):
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = ""
extra_where = ""
pre_query = ""
if metric_of == schemas.TableMetricOfType.user_country:
if metric_of == schemas.MetricOfTable.user_country:
main_col = "user_country"
elif metric_of == schemas.TableMetricOfType.user_device:
elif metric_of == schemas.MetricOfTable.user_device:
main_col = "user_device"
elif metric_of == schemas.TableMetricOfType.user_browser:
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
elif metric_of == schemas.TableMetricOfType.issues:
elif metric_of == schemas.MetricOfTable.issues:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -377,7 +317,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
elif metric_of == schemas.MetricOfTable.visited_url:
main_col = "path"
extra_col = ", path"
main_query = cur.mogrify(f"""{pre_query}
@ -423,7 +363,8 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
# this function generates the query and return the generated-query with the dict of query arguments
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
"projectId": project_id, "userId": user_id}
@ -441,15 +382,15 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
if __is_negation_operator(f.operator):
if sh.is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if is_any:
@ -457,9 +398,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_browser IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
if is_any:
@ -467,9 +409,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_os IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
if is_any:
@ -477,9 +419,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_device IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
if is_any:
@ -487,9 +429,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_country IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
if is_any:
@ -500,11 +443,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_source IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
if is_any:
extra_constraints.append('s.utm_medium IS NOT NULL')
@ -514,11 +457,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_medium IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
if is_any:
extra_constraints.append('s.utm_campaign IS NOT NULL')
@ -528,11 +471,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_campaign IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
@ -549,8 +492,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_constraints.append('s.base_referrer IS NOT NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -564,11 +508,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
extra_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
@ -580,9 +524,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
@ -593,11 +539,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.user_anonymous_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
extra_constraints.append('s.rev_id IS NOT NULL')
@ -607,40 +553,58 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.rev_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
# op = __get_sql_operator(f.operator)
# op = __ sh.get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
if is_any:
extra_constraints.append("array_length(s.issue_types, 1) > 0")
ss_constraints.append("array_length(ms.issue_types, 1) > 0")
else:
extra_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
# search sessions with click_rage on a specific selector
if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value:
for j, sf in enumerate(f.filters):
if sf.operator == schemas.IssueFilterOperator._on_selector:
f_k = f"f_value{i}_{j}"
full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)}
extra_constraints += ["mc.timestamp>=%(startDate)s",
"mc.timestamp<=%(endDate)s",
"mis.type='click_rage'",
sh.multi_conditions(f"mc.selector=%({f_k})s",
sf.value, is_not=is_not,
value_key=f_k)]
extra_from += """INNER JOIN events.clicks AS mc USING(session_id)
INNER JOIN events_common.issues USING (session_id,timestamp)
INNER JOIN public.issues AS mis USING (issue_id)\n"""
elif filter_type == schemas.FilterType.events_count:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
@ -652,16 +616,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
events_joiner = " UNION " if or_events else " INNER JOIN LATERAL "
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
op = sh.get_sql_operator(event.operator)
is_not = False
if __is_negation_operator(event.operator):
if sh.is_negation_operator(event.operator):
is_not = True
op = __reverse_sql_operator(op)
op = sh.reverse_sql_operator(op)
if event_index == 0 or or_events:
event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)"
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
@ -681,116 +645,120 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if event.type != schemas.PerformanceEventType.time_between_events:
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.event_type.CLICK.ui_type:
event_from = event_from % f"{events.event_type.CLICK.table} AS main "
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
if not is_any:
if event.operator == schemas.ClickEventExtraOperator._on_selector:
event_where.append(
sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
else:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.INPUT.ui_type:
event_from = event_from % f"{events.EventType.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.INPUT.ui_type:
event_from = event_from % f"{events.event_type.INPUT.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
elif event_type == events.event_type.LOCATION.ui_type:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
elif event_type == events.EventType.LOCATION.ui_type:
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM.table} AS main "
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.event_type.REQUEST.ui_type:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
value_key=e_k))
# elif event_type == events.event_type.GRAPHQL.ui_type:
# event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
# value_key=e_k))
elif event_type == events.event_type.STATEACTION.ui_type:
event_from = event_from % f"{events.event_type.STATEACTION.table} AS main "
elif event_type == events.EventType.STATEACTION.ui_type:
event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR.ui_type:
event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR.ui_type:
event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
event.source = list(set(event.source))
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
# ----- IOS
elif event_type == events.event_type.CLICK_IOS.ui_type:
event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main "
elif event_type == events.EventType.CLICK_IOS.ui_type:
event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.INPUT_IOS.ui_type:
event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main "
elif event_type == events.EventType.INPUT_IOS.ui_type:
event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, f"custom{i}")}
elif event_type == events.event_type.VIEW_IOS.ui_type:
event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main "
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
elif event_type == events.EventType.VIEW_IOS.ui_type:
event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main "
sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.REQUEST_IOS.ui_type:
event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR_IOS.ui_type:
event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR_IOS.ui_type:
event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
elif event_type == schemas.PerformanceEventType.fetch_failed:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
event.value, value_key=e_k))
col = performance_event.get_col(event_type)
colname = col["column"]
event_where.append(f"main.{colname} = FALSE")
@ -804,7 +772,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# colname = col["column"]
# tname = "main"
# e_k += "_custom"
# full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
#                 full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
# event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
# _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
@ -814,7 +782,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
]:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
col = performance_event.get_col(event_type)
colname = col["column"]
tname = "main"
@ -825,16 +793,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
f"{tname}.timestamp <= %(endDate)s"]
if not is_any:
event_where.append(
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) "
event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
if not isinstance(event.value[0].value, list):
event.value[0].value = [event.value[0].value]
if not isinstance(event.value[1].value, list):
@ -846,98 +814,99 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
e_k1 = e_k + "_e1"
e_k2 = e_k + "_e2"
full_args = {**full_args,
**_multiple_values(event.value[0].value, value_key=e_k1),
**_multiple_values(event.value[1].value, value_key=e_k2)}
s_op = __get_sql_operator(event.value[0].operator)
**sh.multi_values(event.value[0].value, value_key=e_k1),
**sh.multi_values(event.value[1].value, value_key=e_k2)}
s_op = sh.get_sql_operator(event.value[0].operator)
event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"]
if event_index > 0 and not or_events:
event_where.append("main2.session_id=event_0.session_id")
is_any = _isAny_opreator(event.value[0].operator)
is_any = sh.isAny_opreator(event.value[0].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s",
sh.multi_conditions(
f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s",
event.value[0].value, value_key=e_k1))
s_op = __get_sql_operator(event.value[1].operator)
is_any = _isAny_opreator(event.value[1].operator)
s_op = sh.get_sql_operator(event.value[1].operator)
is_any = sh.isAny_opreator(event.value[1].operator)
if not is_any:
event_where.append(
_multiple_conditions(
f"main2.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s",
sh.multi_conditions(
f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s",
event.value[1].value, value_key=e_k2))
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(
_multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:
event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
apply = False
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._status_code:
event_where.append(
_multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._duration:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
else:
print(f"undefined FETCH filter: {f.type}")
if not apply:
continue
elif event_type == schemas.EventType.graphql:
event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = __get_sql_operator(f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
event_where.append(
_multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._method:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
else:
print(f"undefined GRAPHQL filter: {f.type}")
else:
@ -1008,7 +977,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
extra_constraints.append("ser.project_id = %(project_id)s")
# if error_status != schemas.ErrorStatus.all:


@ -3,9 +3,10 @@ from typing import List, Union
import schemas
import schemas_ee
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics, sessions_devtool, \
sessions_mobs, issues, projects, resources, assist, performance_event, metrics, sessions_devtool, \
sessions_notes
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper, errors_helper
from chalicelib.utils import sql_helper as sh
SESSION_PROJECTION_COLS_CH = """\
s.project_id,
@ -105,16 +106,16 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
group_clickrage=True)
all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
# to keep only the first stack
# limit the number of errors to reduce the response-body size
data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
@ -335,18 +336,18 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.TableMetricOfType, metric_value: List):
metric_of: schemas.MetricOfTable, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
density=density))
extra_event = None
if metric_of == schemas.TableMetricOfType.visited_url:
if metric_of == schemas.MetricOfTable.visited_url:
extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
FROM {exp_ch_helper.get_main_events_table(data.startDate)} AS ev
WHERE ev.datetime >= toDateTime(%(startDate)s / 1000)
AND ev.datetime <= toDateTime(%(endDate)s / 1000)
AND ev.project_id = %(project_id)s
AND ev.event_type = 'LOCATION'"""
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
AND ev.event_type = 'LOCATION'"""
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False,
@ -383,21 +384,21 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_type == schemas.MetricType.table:
full_args["limit_s"] = 0
full_args["limit_e"] = 200
if isinstance(metric_of, schemas.TableMetricOfType):
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = "s.user_id"
extra_where = ""
pre_query = ""
if metric_of == schemas.TableMetricOfType.user_country:
if metric_of == schemas.MetricOfTable.user_country:
main_col = "user_country"
extra_col = "s.user_country"
elif metric_of == schemas.TableMetricOfType.user_device:
elif metric_of == schemas.MetricOfTable.user_device:
main_col = "user_device"
extra_col = "s.user_device"
elif metric_of == schemas.TableMetricOfType.user_browser:
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
extra_col = "s.user_browser"
elif metric_of == schemas.TableMetricOfType.issues:
elif metric_of == schemas.MetricOfTable.issues:
main_col = "issue"
extra_col = f"arrayJoin(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -407,7 +408,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
elif metric_of == schemas.MetricOfTable.visited_url:
main_col = "url_path"
extra_col = "s.url_path"
main_query = cur.format(f"""{pre_query}
@ -470,12 +471,13 @@ def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEve
}
if event_type not in defs:
raise Exception(f"unsupported event_type:{event_type}")
raise Exception(f"unsupported EventType:{event_type}")
return defs.get(event_type)
# this function generates the query and return the generated-query with the dict of query arguments
def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
"projectId": project_id, "userId": user_id}
@ -624,7 +626,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
ss_constraints.append(
_multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -777,32 +779,32 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
if event_type == events.event_type.CLICK.ui_type:
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.event_type.CLICK.column
_column = events.EventType.CLICK.column
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.event_type.INPUT.ui_type:
elif event_type == events.EventType.INPUT.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.event_type.INPUT.column
_column = events.EventType.INPUT.column
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
@ -813,7 +815,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
elif event_type == events.event_type.LOCATION.ui_type:
elif event_type == events.EventType.LOCATION.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
@ -822,28 +824,28 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.event_type.CUSTOM.ui_type:
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.event_type.CUSTOM.column
_column = events.EventType.CUSTOM.column
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.event_type.REQUEST.ui_type:
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
@ -852,38 +854,38 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
# elif event_type == events.event_type.GRAPHQL.ui_type:
# elif event_type == events.EventType.GRAPHQL.ui_type:
# event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
# event_where.append(f"main.event_type='GRAPHQL'")
# events_conditions.append({"type": event_where[-1]})
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
# _multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k})s", event.value,
# value_key=e_k))
# events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.event_type.STATEACTION.ui_type:
elif event_type == events.EventType.STATEACTION.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.event_type.STATEACTION.column
_column = events.EventType.STATEACTION.column
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
# TODO: isNot for ERROR
elif event_type == events.event_type.ERROR.ui_type:
elif event_type == events.EventType.ERROR.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
@ -913,7 +915,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
@ -925,13 +927,13 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# elif event_type == schemas.PerformanceEventType.fetch_duration:
# event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
#             elif event_type == schemas.PerformanceEventType.fetch_duration:
# event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.url_path {op} %({e_k})s",
# event.value, value_key=e_k))
# col = performance_event.get_col(event_type)
#             col = performance_event.get_col(event_type)
# colname = col["column"]
# tname = "main"
# e_k += "_custom"
@ -989,7 +991,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
# TODO: no isNot for TimeBetweenEvents
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
# event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) "
# event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'")
events_conditions.append({"type": event_where[-1]})
event_where.append(f"main.event_type='{__get_event_type(event.value[0].type)}'")
@ -1016,7 +1018,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
if not is_any:
event_where.append(
_multiple_conditions(
f"main.{getattr(events.event_type, event.value[0].type).column} {s_op} %({e_k1})s",
f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s",
event.value[0].value, value_key=e_k1))
events_conditions[-2]["condition"] = event_where[-1]
s_op = __get_sql_operator(event.value[1].operator)
@ -1024,7 +1026,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
if not is_any:
event_where.append(
_multiple_conditions(
f"main.{getattr(events.event_type, event.value[1].type).column} {s_op} %({e_k2})s",
f"main.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s",
event.value[1].value, value_key=e_k2))
events_conditions[-1]["condition"] = event_where[-1]
@ -1106,7 +1108,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
event_where.append(
_multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k_f})s", f.value,
_multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType._method:
@ -1287,7 +1289,7 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
# if errors_only:
# extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
# extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
# extra_constraints.append("ser.source = 'js_exception'")
# extra_constraints.append("ser.project_id = %(project_id)s")
# if error_status != schemas.ErrorStatus.all:


@ -0,0 +1,423 @@
import schemas, schemas_ee
from typing import List, Optional
from chalicelib.core import metrics
from chalicelib.utils import ch_client
def _table_slice(table, index):
col = list()
for row in table:
col.append(row[index])
return col
def _table_where(table, index, value):
new_table = list()
for row in table:
if row[index] == value:
new_table.append(row)
return new_table
def _sum_table_index(table, index):
s = 0
count = 0
for row in table:
v = row[index]
if v is None:
continue
s += v
count += 1
return s
def _mean_table_index(table, index):
s = _sum_table_index(table, index)
c = len(table)
return s / c
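# These helpers emulate simple column operations over a list-of-rows table.
# Illustrative values (not taken from the queries below):
#   t = [[1, "a"], [2, "a"], [3, "b"]]
#   _table_slice(t, 0)       -> [1, 2, 3]
#   _table_where(t, 1, "a")  -> [[1, "a"], [2, "a"]]
#   _sum_table_index(t, 0)   -> 6   (None cells are skipped)
#   _mean_table_index(t, 0)  -> 2.0
# Note that _mean_table_index divides by len(table), so rows holding None still
# count towards the denominator, and an empty table raises ZeroDivisionError.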
def _sort_table_index(table, index, reverse=False):
return sorted(table, key=lambda k: k[index], reverse=reverse)
def _select_rec(l, selector):
print('selector:', selector)
print('list:', l)
if len(selector) == 1:
return l[selector[0]]
else:
s = selector[0]
L = l[s]
type_ = type(s)
if type_ == slice:
return [_select_rec(l_, selector[1:]) for l_ in L]
elif type_ == int:
return [_select_rec(L, selector[1:])]
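# Usage sketch for _select_rec (illustrative values): the selector is a list of
# indices/slices applied level by level to a nested list.
#   _select_rec([[1, 2], [3, 4]], [slice(None), 0])  -> [1, 3]   (column 0)
#   _select_rec([[1, 2], [3, 4]], [1, 0])            -> [3]      (row 1, column 0)
# A selector element that is neither a slice nor an int falls through and
# returns None.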
def __get_two_values(response, time_index='hh', name_index='name'):
columns = list(response[0].keys())
name_index_val = columns.index(name_index)
time_index_value = columns.index(time_index)
table = [list(r.values()) for r in response]
table_hh1 = list()
table_hh2 = list()
hh_vals = list()
names_hh1 = list()
names_hh2 = list()
for e in table:
if e[time_index_value] not in hh_vals and len(hh_vals) == 2:
break
elif e[time_index_value] not in hh_vals:
hh_vals.append(e[time_index_value])
if len(hh_vals) == 1:
table_hh1.append(e)
if e[name_index_val] not in names_hh1:
names_hh1.append(e[name_index_val])
elif len(hh_vals) == 2:
table_hh2.append(e)
if e[name_index_val] not in names_hh2:
names_hh2.append(e[name_index_val])
return table_hh1, table_hh2, columns, names_hh1, names_hh2
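# Sketch of what __get_two_values returns, assuming rows ordered by the time
# column DESC as the queries below produce (illustrative values):
#   rows = [{"hh": 2, "name": "a", "sessions": 5},
#           {"hh": 2, "name": "b", "sessions": 1},
#           {"hh": 1, "name": "a", "sessions": 4}]
#   t1, t2, cols, names1, names2 = __get_two_values(rows, time_index="hh",
#                                                   name_index="name")
# gives t1 = the hh=2 rows, t2 = the hh=1 rows, cols = ["hh", "name", "sessions"],
# names1 = ["a", "b"], names2 = ["a"]; any third period in the input is ignored.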
def query_requests_by_period(project_id, start_time, end_time, filters: Optional[schemas.SessionsSearchPayloadSchema]):
params = {
"project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
"step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
}
params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params)
conditions = ["event_type = 'REQUEST'"]
query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.success) as success_rate, T2.url_host as names,
T2.url_path as source, avg(T2.duration) as avg_duration
FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
LEFT JOIN (SELECT session_id, url_host, url_path, success, message, duration, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
FROM experimental.events
{sub_query}
WHERE project_id = %(project_id)s
AND datetime >= toDateTime(%(startTimestamp)s/1000)
AND datetime < toDateTime(%(endTimestamp)s/1000)
AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
GROUP BY T1.hh, T2.url_host, T2.url_path
ORDER BY T1.hh DESC;"""
with ch_client.ClickHouseClient() as conn:
query = conn.format(query=query, params=params)
res = conn.execute(query=query)
table_hh1, table_hh2, columns, this_period_hosts, last_period_hosts = __get_two_values(res, time_index='hh',
name_index='source')
test = [k[4] for k in table_hh1]
print(f'length {len(test)}, uniques {len(set(test))}')
del res
new_hosts = [x for x in this_period_hosts if x not in last_period_hosts]
common_names = [x for x in this_period_hosts if x not in new_hosts]
source_idx = columns.index('source')
duration_idx = columns.index('avg_duration')
# success_idx = columns.index('success_rate')
# delta_duration = dict()
# delta_success = dict()
new_duration_values = dict()
duration_values = dict()
for n in common_names:
d1_tmp = _table_where(table_hh1, source_idx, n)
d2_tmp = _table_where(table_hh2, source_idx, n)
old_duration = _mean_table_index(d2_tmp, duration_idx)
new_duration = _mean_table_index(d1_tmp, duration_idx)
if old_duration == 0:
continue
duration_values[n] = new_duration, old_duration, (new_duration - old_duration) / old_duration
# delta_duration[n] = (_mean_table_index(d1_tmp, duration_idx) - _duration1) / _duration1
# delta_success[n] = _mean_table_index(d1_tmp, success_idx) - _mean_table_index(d2_tmp, success_idx)
for n in new_hosts:
d1_tmp = _table_where(table_hh1, source_idx, n)
new_duration_values[n] = _mean_table_index(d1_tmp, duration_idx)
# names_idx = columns.index('names')
total = _sum_table_index(table_hh1, duration_idx)
d1_tmp = _sort_table_index(table_hh1, duration_idx, reverse=True)
_tmp = _table_slice(d1_tmp, duration_idx)
_tmp2 = _table_slice(d1_tmp, source_idx)
increase = sorted(duration_values.items(), key=lambda k: k[1][-1], reverse=True)
ratio = sorted(zip(_tmp2, _tmp), key=lambda k: k[1], reverse=True)
# names_ = set([k[0] for k in increase[:3]+ratio[:3]]+new_hosts[:3])
names_ = set([k[0] for k in increase[:3] + ratio[:3]])  # we took out new hosts since they don't give much info
results = list()
for n in names_:
if n is None:
continue
data_ = {'category': schemas_ee.InsightCategories.network, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
if n in new_hosts:
data_['value'] = new_duration_values[n]
data_['ratio'] = v / total
break
for n_, v in increase:
if n == n_:
data_['value'] = v[0]
data_['oldValue'] = v[1]
data_['change'] = v[2]
data_['isNew'] = False
break
results.append(data_)
return results
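# Hypothetical call (illustrative ids/timestamps): the result keeps at most the
# top-3 sources by duration share plus the top-3 by period-over-period change.
#   insights = query_requests_by_period(project_id=1,
#                                       start_time=1674000000000,
#                                       end_time=1674086400000,
#                                       filters=None)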
def __filter_subquery(project_id: int, filters: Optional[schemas.SessionsSearchPayloadSchema], params: dict):
sub_query = ""
if filters and (len(filters.events) > 0 or len(filters.filters) > 0):
qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id,
error_status=None,
errors_only=True, favorite_only=None,
issue=None, user_id=None)
params = {**params, **qp_params}
sub_query = f"INNER JOIN {sub_query} USING(session_id)"
return params, sub_query
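# With no filters, __filter_subquery is a no-op; otherwise it builds an INNER
# JOIN restricting events to the sessions matched by the search payload.
# Minimal sketch (hypothetical project id):
#   params, sub_query = __filter_subquery(project_id=1, filters=None, params={})
#   assert sub_query == ""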
def query_most_errors_by_period(project_id, start_time, end_time,
filters: Optional[schemas.SessionsSearchPayloadSchema]):
params = {
"project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
"step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
}
params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params)
conditions = ["event_type = 'ERROR'"]
query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
SELECT T1.hh, count(T2.session_id) as sessions, T2.name as names,
groupUniqArray(T2.source) as sources
FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
LEFT JOIN (SELECT session_id, name, source, message, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
FROM experimental.events
{sub_query}
WHERE project_id = %(project_id)s
AND datetime >= toDateTime(%(startTimestamp)s/1000)
AND datetime < toDateTime(%(endTimestamp)s/1000)
AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
GROUP BY T1.hh, T2.name
ORDER BY T1.hh DESC;"""
with ch_client.ClickHouseClient() as conn:
query = conn.format(query=query, params=params)
res = conn.execute(query=query)
table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh',
name_index='names')
del res
new_errors = [x for x in this_period_errors if x not in last_period_errors]
common_errors = [x for x in this_period_errors if x not in new_errors]
sessions_idx = columns.index('sessions')
names_idx = columns.index('names')
percentage_errors = dict()
total = _sum_table_index(table_hh1, sessions_idx)
# error_increase = dict()
new_error_values = dict()
error_values = dict()
for n in this_period_errors:
    percentage_errors[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
    new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
for n in common_errors:
    # count sessions per error name (summing the names column would add strings);
    # renamed to avoid shadowing the new_errors list used below
    old_count = _sum_table_index(_table_where(table_hh2, names_idx, n), sessions_idx)
    if old_count == 0:
        continue
    new_count = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
    # error_increase[n] = (new_count - old_count) / old_count
    error_values[n] = new_count, old_count, (new_count - old_count) / old_count
ratio = sorted(percentage_errors.items(), key=lambda k: k[1], reverse=True)
increase = sorted(error_values.items(), key=lambda k: k[1][-1], reverse=True)
names_ = set([k[0] for k in increase[:3] + ratio[:3]] + new_errors[:3])
results = list()
for n in names_:
if n is None:
continue
data_ = {'category': schemas_ee.InsightCategories.errors, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
if n in new_errors:
data_['value'] = new_error_values[n]
data_['ratio'] = v / total
break
for n_, v in increase:
if n == n_:
data_['value'] = v[0]
data_['oldValue'] = v[1]
data_['change'] = v[2]
data_['isNew'] = False
break
results.append(data_)
return results
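# Every insight entry shares the same shape across categories; an illustrative
# (made-up) example of one entry produced by this function:
#   {"category": schemas_ee.InsightCategories.errors, "name": "TypeError",
#    "value": 12, "oldValue": 7, "ratio": None, "change": 0.714, "isNew": False}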
def query_cpu_memory_by_period(project_id, start_time, end_time,
filters: Optional[schemas.SessionsSearchPayloadSchema]):
params = {
"project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
"step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)
}
params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params)
conditions = ["event_type = 'PERFORMANCE'"]
query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.avg_cpu) as cpu_used,
avg(T2.avg_used_js_heap_size) as memory_used, T2.url_host as names, groupUniqArray(T2.url_path) as sources
FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
LEFT JOIN (SELECT session_id, url_host, url_path, avg_used_js_heap_size, avg_cpu, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
FROM experimental.events
{sub_query}
WHERE project_id = %(project_id)s
AND datetime >= toDateTime(%(startTimestamp)s/1000)
AND datetime < toDateTime(%(endTimestamp)s/1000)
AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
GROUP BY T1.hh, T2.url_host
ORDER BY T1.hh DESC;"""
with ch_client.ClickHouseClient() as conn:
query = conn.format(query=query, params=params)
res = conn.execute(query=query)
table_hh1, table_hh2, columns, this_period_resources, last_period_resources = __get_two_values(res, time_index='hh',
name_index='names')
del res
memory_idx = columns.index('memory_used')
cpu_idx = columns.index('cpu_used')
mem_newvalue = _mean_table_index(table_hh1, memory_idx)
mem_oldvalue = _mean_table_index(table_hh2, memory_idx)
cpu_newvalue = _mean_table_index(table_hh1, cpu_idx)
cpu_oldvalue = _mean_table_index(table_hh2, cpu_idx)
# TODO: handle a zero previous-period average more gracefully than clamping to 1
mem_oldvalue = 1 if mem_oldvalue == 0 else mem_oldvalue
cpu_oldvalue = 1 if cpu_oldvalue == 0 else cpu_oldvalue
return [{'category': schemas_ee.InsightCategories.resources,
'name': 'cpu',
'value': cpu_newvalue,
'oldValue': cpu_oldvalue,
'change': (cpu_newvalue - cpu_oldvalue) / cpu_oldvalue,
'isNew': None},
{'category': schemas_ee.InsightCategories.resources,
'name': 'memory',
'value': mem_newvalue,
'oldValue': mem_oldvalue,
'change': (mem_newvalue - mem_oldvalue) / mem_oldvalue,
'isNew': None}
]
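# NOTE: this import sits mid-module rather than at the top, presumably to avoid
# a circular import (sessions_exp pulls in other chalicelib.core modules); it is
# only needed at call time by __filter_subquery above.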
from chalicelib.core import sessions_exp
def query_click_rage_by_period(project_id, start_time, end_time,
filters: Optional[schemas.SessionsSearchPayloadSchema]):
params = {
"project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time,
"step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)}
params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params)
conditions = ["issue_type = 'click_rage'", "event_type = 'ISSUE'"]
query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
SELECT T1.hh, count(T2.session_id) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources
FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
LEFT JOIN (SELECT session_id, url_host, url_path, toStartOfInterval(datetime, INTERVAL %(step_size)s second ) as dtime
FROM experimental.events
{sub_query}
WHERE project_id = %(project_id)s
AND datetime >= toDateTime(%(startTimestamp)s/1000)
AND datetime < toDateTime(%(endTimestamp)s/1000)
AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh
GROUP BY T1.hh, T2.url_path
ORDER BY T1.hh DESC;"""
with ch_client.ClickHouseClient() as conn:
query = conn.format(query=query, params=params)
res = conn.execute(query=query)
table_hh1, table_hh2, columns, this_period_rage, last_period_rage = __get_two_values(res, time_index='hh',
name_index='sources')
del res
new_names = [x for x in this_period_rage if x not in last_period_rage]
common_names = [x for x in this_period_rage if x not in new_names]
sessions_idx = columns.index('sessions')
names_idx = columns.index('sources')
# raged_increment = dict()
raged_values = dict()
new_raged_values = dict()
# TODO verify line (188) _tmp = table_hh2[:, sessions_idx][n].sum()
for n in common_names:
if n is None:
continue
_oldvalue = _sum_table_index(_table_where(table_hh2, names_idx, n), sessions_idx)
_newvalue = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
# raged_increment[n] = (_newvalue - _oldvalue) / _oldvalue
raged_values[n] = _newvalue, _oldvalue, (_newvalue - _oldvalue) / _oldvalue
for n in new_names:
if n is None:
continue
_newvalue = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx)
new_raged_values[n] = _newvalue
total = _sum_table_index(table_hh1, sessions_idx)
names, ratio = _table_slice(table_hh1, names_idx), _table_slice(table_hh1, sessions_idx)
ratio = sorted(zip(names, ratio), key=lambda k: k[1], reverse=True)
increase = sorted(raged_values.items(), key=lambda k: k[1][-1], reverse=True)
names_ = set([k[0] for k in increase[:3] + ratio[:3]] + new_names[:3])
results = list()
for n in names_:
if n is None:
continue
data_ = {'category': schemas_ee.InsightCategories.rage, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
if n in new_names:
data_['value'] = new_raged_values[n]
data_['ratio'] = v / total
break
for n_, v in increase:
if n == n_:
data_['value'] = v[0]
data_['oldValue'] = v[1]
data_['change'] = v[2]
data_['isNew'] = False
break
results.append(data_)
return results
def fetch_selected(project_id, data: schemas_ee.GetInsightsSchema):
output = list()
if data.metricValue is None or len(data.metricValue) == 0:
data.metricValue = []
for v in schemas_ee.InsightCategories:
data.metricValue.append(v)
filters = None
if len(data.series) > 0:
filters = data.series[0].filter
if schemas_ee.InsightCategories.errors in data.metricValue:
output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas_ee.InsightCategories.network in data.metricValue:
output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas_ee.InsightCategories.rage in data.metricValue:
output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas_ee.InsightCategories.resources in data.metricValue:
output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
return output
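# Hypothetical usage (illustrative values): with no metricValue provided, every
# insight category is computed for the requested window, assuming
# GetInsightsSchema accepts these fields directly:
#   data = schemas_ee.GetInsightsSchema(startTimestamp=1674000000000,
#                                       endTimestamp=1674086400000)
#   insights = fetch_selected(project_id=1, data=data)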


@ -0,0 +1,76 @@
import schemas
from chalicelib.utils.event_filter_definition import SupportedFilter
from decouple import config
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
from . import autocomplete_exp as autocomplete
else:
from . import autocomplete as autocomplete
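# Config-driven toggle: when EXP_AUTOCOMPLETE is truthy the experimental
# implementation is used in place of the default one; both modules are expected
# to expose the same __generic_autocomplete_metas interface used below.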
SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)),
schemas.FilterType.user_browser: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)),
schemas.FilterType.user_device: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)),
schemas.FilterType.user_country: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)),
schemas.FilterType.user_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)),
schemas.FilterType.rev_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)),
schemas.FilterType.referrer: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)),
schemas.FilterType.utm_campaign: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)),
schemas.FilterType.utm_medium: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)),
schemas.FilterType.utm_source: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios)),
schemas.FilterType.user_device_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(
typename=schemas.FilterType.user_device_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_ios)),
schemas.FilterType.user_country_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios)),
schemas.FilterType.user_id_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios)),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios)),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios)),
}
def search(text: str, meta_type: schemas.FilterType, project_id: int):
rows = []
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
# for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}
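Reviewer note: a quick sketch of the registry in action, calling the search entry point defined above (project_id and text are illustrative):
# Hedged usage sketch of the SUPPORTED_TYPES lookup.
import schemas

print(search(text="Chrome", meta_type=schemas.FilterType.user_browser, project_id=1))
# -> {"data": [...autocomplete rows...]}
print(search(text="x", meta_type="no-such-type", project_id=1))
# -> {"errors": ["unsupported type"]}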

View file

@@ -3,16 +3,17 @@ from urllib.parse import urljoin
from decouple import config
import schemas
from chalicelib.core import sessions
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name
{",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s) AS share_name" if share else ""}
{",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
AND sessions_notes.note_id = %(note_id)s
@@ -59,8 +60,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
if data.tags and len(data.tags) > 0:
k = "tag_value"
conditions.append(
sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sessions._multiple_values(data.tags, value_key=k)
sh.multi_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
extra_params = sh.multi_values(data.tags, value_key=k)
if data.shared_only:
conditions.append("sessions_notes.is_public AND users.tenant_id = %(tenant_id)s")
elif data.mine_only:
@@ -170,3 +171,60 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
webhook_id=webhook_id,
body={"blocks": blocks}
)
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"[Note for session {note['sessionId']}]({session_url})"
blocks = [{
"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"
},
{
"type": "TextBlock",
"spacing": "Small",
"text": note["message"]
}
]
if note["tag"]:
blocks.append({"type": "TextBlock",
"spacing": "Small",
"text": f"Tag: *{note['tag']}*",
"size": "Small"})
bottom = f"Created by {note['creatorName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "TextBlock",
"spacing": "Default",
"text": bottom,
"size": "Small",
"fontType": "Monospace"})
return MSTeams.send_raw(
tenant_id=tenant_id,
webhook_id=webhook_id,
body={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [{
"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{"width": "stretch",
"items": blocks,
"type": "Column"}]
}]}}
]})
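Reviewer note: for context, a Teams incoming webhook accepts this kind of Adaptive Card envelope as a plain JSON POST; a minimal sketch with requests, assuming MSTeams.send_raw wraps something similar (the webhook URL is a placeholder):
# Hedged sketch: posting an Adaptive Card message to a Teams incoming webhook.
import requests

payload = {"type": "message",
           "attachments": [{"contentType": "application/vnd.microsoft.card.adaptive",
                            "contentUrl": None,
                            "content": {"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                                        "type": "AdaptiveCard",
                                        "version": "1.5",
                                        "body": [{"type": "TextBlock", "text": "Hello from OpenReplay"}]}}]}
r = requests.post("https://example.webhook.office.com/webhookb2/...", json=payload)  # placeholder URL
r.raise_for_status()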

View file

@@ -92,11 +92,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values["maxDuration"] = f["value"][1]
elif filter_type == schemas.FilterType.referrer:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
@@ -141,31 +141,31 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
next_table = events.event_type.CLICK.table
next_col_name = events.event_type.CLICK.column
elif event_type == events.event_type.INPUT.ui_type:
next_table = events.event_type.INPUT.table
next_col_name = events.event_type.INPUT.column
elif event_type == events.event_type.LOCATION.ui_type:
next_table = events.event_type.LOCATION.table
next_col_name = events.event_type.LOCATION.column
elif event_type == events.event_type.CUSTOM.ui_type:
next_table = events.event_type.CUSTOM.table
next_col_name = events.event_type.CUSTOM.column
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
# IOS --------------
elif event_type == events.event_type.CLICK_IOS.ui_type:
next_table = events.event_type.CLICK_IOS.table
next_col_name = events.event_type.CLICK_IOS.column
elif event_type == events.event_type.INPUT_IOS.ui_type:
next_table = events.event_type.INPUT_IOS.table
next_col_name = events.event_type.INPUT_IOS.column
elif event_type == events.event_type.VIEW_IOS.ui_type:
next_table = events.event_type.VIEW_IOS.table
next_col_name = events.event_type.VIEW_IOS.column
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
next_table = events.event_type.CUSTOM_IOS.table
next_col_name = events.event_type.CUSTOM_IOS.column
elif event_type == events.EventType.CLICK_IOS.ui_type:
next_table = events.EventType.CLICK_IOS.table
next_col_name = events.EventType.CLICK_IOS.column
elif event_type == events.EventType.INPUT_IOS.ui_type:
next_table = events.EventType.INPUT_IOS.table
next_col_name = events.EventType.INPUT_IOS.column
elif event_type == events.EventType.VIEW_IOS.ui_type:
next_table = events.EventType.VIEW_IOS.table
next_col_name = events.EventType.VIEW_IOS.column
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
print("=================UNDEFINED")
continue
@@ -326,7 +326,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues,
transitions ::: if transited from the first stage to the last - 1
else - 0
errors ::: a dictionary where the keys are all unique issues (currently context-wise)
errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
the values are lists
if an issue happened between the first stage to the last - 1
else - 0

View file

@@ -92,11 +92,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values["maxDuration"] = f["value"][1]
elif filter_type == schemas.FilterType.referrer:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
elif filter_type == events.EventType.METADATA.ui_type:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
@@ -141,31 +141,31 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
next_table = events.event_type.CLICK.table
next_col_name = events.event_type.CLICK.column
elif event_type == events.event_type.INPUT.ui_type:
next_table = events.event_type.INPUT.table
next_col_name = events.event_type.INPUT.column
elif event_type == events.event_type.LOCATION.ui_type:
next_table = events.event_type.LOCATION.table
next_col_name = events.event_type.LOCATION.column
elif event_type == events.event_type.CUSTOM.ui_type:
next_table = events.event_type.CUSTOM.table
next_col_name = events.event_type.CUSTOM.column
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
# IOS --------------
elif event_type == events.event_type.CLICK_IOS.ui_type:
next_table = events.event_type.CLICK_IOS.table
next_col_name = events.event_type.CLICK_IOS.column
elif event_type == events.event_type.INPUT_IOS.ui_type:
next_table = events.event_type.INPUT_IOS.table
next_col_name = events.event_type.INPUT_IOS.column
elif event_type == events.event_type.VIEW_IOS.ui_type:
next_table = events.event_type.VIEW_IOS.table
next_col_name = events.event_type.VIEW_IOS.column
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
next_table = events.event_type.CUSTOM_IOS.table
next_col_name = events.event_type.CUSTOM_IOS.column
elif event_type == events.EventType.CLICK_IOS.ui_type:
next_table = events.EventType.CLICK_IOS.table
next_col_name = events.EventType.CLICK_IOS.column
elif event_type == events.EventType.INPUT_IOS.ui_type:
next_table = events.EventType.INPUT_IOS.table
next_col_name = events.EventType.INPUT_IOS.column
elif event_type == events.EventType.VIEW_IOS.ui_type:
next_table = events.EventType.VIEW_IOS.table
next_col_name = events.EventType.VIEW_IOS.column
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
next_table = events.EventType.CUSTOM_IOS.table
next_col_name = events.EventType.CUSTOM_IOS.column
else:
print("=================UNDEFINED")
continue
@@ -304,7 +304,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
transitions ::: if transited from the first stage to the last - 1
else - 0
errors ::: a dictionary where the keys are all unique issues (currently context-wise)
errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
the values are lists
if an issue happened between the first stage to the last - 1
else - 0

View file

@@ -678,12 +678,12 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
def change_jwt_iat(user_id):
@@ -742,9 +742,9 @@ def authenticate(email, password, for_change_password=False):
return True
r = helper.dict_to_camel_case(r)
jwt_iat = change_jwt_iat(r['userId'])
iat = TimeUTC.datetime_to_timestamp(jwt_iat)
return {
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
TimeUTC.datetime_to_timestamp(jwt_iat),
"jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], iat=iat,
aud=f"front:{helper.get_stage_name()}"),
"email": email,
**r
@@ -776,7 +776,7 @@ def authenticate_sso(email, internal_id, exp=None):
r = helper.dict_to_camel_case(r)
jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId']))
return authorizers.generate_jwt(r['userId'], r['tenantId'],
jwt_iat, aud=f"front:{helper.get_stage_name()}",
iat=jwt_iat, aud=f"front:{helper.get_stage_name()}",
exp=(exp + jwt_iat // 1000) if exp is not None else None)
return None
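Reviewer note: the iat/aud/exp handling above maps onto standard JWT claims. As a point of reference (pyjwt is pinned in the requirements files further down), encoding the same claims directly would look like this hedged sketch; generate_jwt's internals are not shown in this diff:
# Hedged sketch of the claims involved; key and identifiers are placeholders.
import jwt  # pyjwt

token = jwt.encode(
    {"userId": 7, "tenantId": 1,      # illustrative identifiers
     "iat": 1674000000,               # seconds since epoch
     "aud": "front:production",       # stage name is resolved at runtime in the source
     "exp": 1674000000 + 3600},
    "secret-key", algorithm="HS256")  # placeholder key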

View file

@@ -12,7 +12,7 @@ def get_by_id(webhook_id):
cur.mogrify("""\
SELECT w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
WHERE w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
)
w = helper.dict_to_camel_case(cur.fetchone())
@@ -21,15 +21,14 @@ def get_by_id(webhook_id):
return w
def get(tenant_id, webhook_id):
def get_webhook(tenant_id, webhook_id, webhook_type='webhook'):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id, "tenant_id": tenant_id})
cur.mogrify("""SELECT w.*
FROM public.webhooks AS w
WHERE w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s
AND deleted_at ISNULL AND type=%(webhook_type)s;""",
{"webhook_id": webhook_id, "webhook_type": webhook_type, "tenant_id": tenant_id})
)
w = helper.dict_to_camel_case(cur.fetchone())
if w:
@@ -40,9 +39,7 @@ def get(tenant_id, webhook_id):
def get_by_type(tenant_id, webhook_type):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
cur.mogrify("""SELECT w.webhook_id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.tenant_id =%(tenant_id)s
AND w.type =%(type)s
@ -58,25 +55,15 @@ def get_by_type(tenant_id, webhook_type):
def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
SELECT
webhook_id AS integration_id, webhook_id AS id,w.*
FROM public.webhooks AS w
where
w.tenant_id =%(tenant_id)s
AND deleted_at ISNULL;""",
cur.mogrify("""SELECT w.*
FROM public.webhooks AS w
WHERE w.tenant_id =%(tenant_id)s
AND deleted_at ISNULL;""",
{"tenant_id": tenant_id})
)
all = helper.list_to_camel_case(cur.fetchall())
if replace_none:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for k in w.keys():
if w[k] is None:
w[k] = ''
else:
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
for w in all:
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
return all
@@ -89,7 +76,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE tenant_id =%(tenant_id)s AND webhook_id =%(id)s AND deleted_at ISNULL
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"tenant_id": tenant_id, "id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
@@ -106,7 +93,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
query = cur.mogrify("""\
INSERT INTO public.webhooks(tenant_id, endpoint,auth_header,type,name)
VALUES (%(tenant_id)s, %(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
RETURNING *;""",
{"tenant_id": tenant_id, "endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(

View file

@@ -85,7 +85,10 @@ async def prepare_request(request: Request):
extracted_cookies = {}
for key, morsel in cookie.items():
extracted_cookies[key] = morsel.value
session = extracted_cookies["session"]
if "session" not in extracted_cookies:
print("!!! session not found in extracted_cookies")
print(extracted_cookies)
session = extracted_cookies.get("session", {})
else:
session = {}
# If server is behind proxys or balancers use the HTTP_X_FORWARDED fields

View file

@@ -8,19 +8,19 @@ if config("EXP_7D_MV", cast=bool, default=True):
print(">>> Using experimental last 7 days materialized views")
def get_main_events_table(timestamp):
def get_main_events_table(timestamp=0):
return "experimental.events_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events"
def get_main_sessions_table(timestamp):
def get_main_sessions_table(timestamp=0):
return "experimental.sessions_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
def get_main_resources_table(timestamp):
def get_main_resources_table(timestamp=0):
return "experimental.resources_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources"
@@ -43,6 +43,8 @@ def get_user_viewed_errors_table(timestamp=0):
def get_main_js_errors_sessions_table(timestamp=0):
return "experimental.js_errors_sessions_mv" # \
return get_main_events_table(timestamp=timestamp)
# enable this when js_errors_sessions_mv is fixed
# return "experimental.js_errors_sessions_mv" # \
# if config("EXP_7D_MV", cast=bool, default=True) \
# and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events"
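Reviewer note: the net effect of the new timestamp=0 defaults is that callers which omit the timestamp always fall back to the full tables, since 0 is older than seven days. A small sketch, assuming EXP_7D_MV keeps its default of true:
# Hedged sketch of the materialized-view selection above.
from chalicelib.utils.TimeUTC import TimeUTC

get_main_events_table()                              # -> "experimental.events" (0 predates the 7-day window)
get_main_events_table(TimeUTC.now())                 # -> "experimental.events_l7d_mv"
get_main_events_table(TimeUTC.now(delta_days=-30))   # -> "experimental.events"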

View file

@@ -7,6 +7,10 @@ def tag_session(file_key, tag_key='retention', tag_value='vault'):
return tag_file(file_key=file_key, bucket=config("sessions_bucket"), tag_key=tag_key, tag_value=tag_value)
def tag_record(file_key, tag_key='retention', tag_value='vault'):
return tag_file(file_key=file_key, bucket=config('ASSIST_RECORDS_BUCKET'), tag_key=tag_key, tag_value=tag_value)
def tag_file(file_key, bucket, tag_key, tag_value):
return s3.client.put_object_tagging(
Bucket=bucket,

View file

@@ -4,13 +4,16 @@ rm -rf ./chalicelib/core/alerts.py
#exp rm -rf ./chalicelib/core/alerts_processor.py
rm -rf ./chalicelib/core/announcements.py
rm -rf ./chalicelib/core/autocomplete.py
rm -rf ./chalicelib/core/click_maps.py
rm -rf ./chalicelib/core/collaboration_base.py
rm -rf ./chalicelib/core/collaboration_msteams.py
rm -rf ./chalicelib/core/collaboration_slack.py
rm -rf ./chalicelib/core/countries.py
#exp rm -rf ./chalicelib/core/errors.py
rm -rf ./chalicelib/core/errors_favorite.py
#exp rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
#exp rm -rf ./chalicelib/core/dashboards.py
rm -rf ./chalicelib/core/dashboards.py
#exp rm -rf ./chalicelib/core/funnels.py
rm -rf ./chalicelib/core/integration_base.py
rm -rf ./chalicelib/core/integration_base_issue.py
@@ -33,10 +36,9 @@ rm -rf ./chalicelib/core/log_tool_sumologic.py
rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/sessions_assignments.py
rm -rf ./chalicelib/core/sessions_metas.py
#exp rm -rf ./chalicelib/core/sessions_metas.py
rm -rf ./chalicelib/core/sessions_mobs.py
#exp rm -rf ./chalicelib/core/significance.py
rm -rf ./chalicelib/core/slack.py
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
@@ -48,6 +50,7 @@ rm -rf ./chalicelib/utils/captcha.py
rm -rf ./chalicelib/utils/dev.py
rm -rf ./chalicelib/utils/email_handler.py
rm -rf ./chalicelib/utils/email_helper.py
rm -rf ./chalicelib/utils/errors_helper.py
rm -rf ./chalicelib/utils/event_filter_definition.py
rm -rf ./chalicelib/utils/github_client_v3.py
rm -rf ./chalicelib/utils/helper.py
@@ -56,6 +59,7 @@ rm -rf ./chalicelib/utils/metrics_helper.py
rm -rf ./chalicelib/utils/pg_client.py
rm -rf ./chalicelib/utils/s3.py
rm -rf ./chalicelib/utils/smtp.py
rm -rf ./chalicelib/utils/sql_helper.py
rm -rf ./chalicelib/utils/strings.py
rm -rf ./chalicelib/utils/TimeUTC.py
rm -rf ./routers/app/__init__.py

View file

@@ -45,6 +45,7 @@ PG_MAXCONN=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
ASSIST_RECORDS_BUCKET=records
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps

View file

@@ -1,18 +1,18 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.0
jira==3.4.1
fastapi==0.87.0
fastapi==0.89.1
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
clickhouse-driver==0.2.5
python-multipart==0.0.5

View file

@@ -1,18 +1,13 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.0
jira==3.4.1
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
python-multipart==0.0.5
clickhouse-driver==0.2.5

View file

@@ -1,19 +1,19 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.14
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.1
elasticsearch==8.6.0
jira==3.4.1
fastapi==0.87.0
fastapi==0.89.1
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
python3-saml==1.14.0
clickhouse-driver==0.2.5
python3-saml==1.15.0
python-multipart==0.0.5

View file

@@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse
import schemas
import schemas_ee
from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
errors_favorite, sessions_notes
errors_favorite, sessions_notes, click_maps
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
@@ -64,19 +64,10 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
}}
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
@@ -85,7 +76,7 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)
@@ -161,11 +152,6 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
m_key=key, project_id=projectId)}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
@@ -266,8 +252,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
if data is None:
data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, full_data=True,
include_fav_viewed=True, group_metadata=True, live=False)
data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId,
full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
if data is None:
return {"errors": ["session not found"]}
if data.get("inDB"):
@@ -420,13 +406,21 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D
return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@@ -436,3 +430,8 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
return data
return {'data': data}
@app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)])
def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
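Reviewer note: the new click-maps route can be exercised like any other authenticated POST endpoint; a hedged sketch over HTTP (host, token, and body fields are placeholders; FlatClickMapSessionsSearch is not expanded in this diff):
# Hedged sketch; the request body fields are assumed, not taken from the schema.
import requests

r = requests.post("https://openreplay.example.com/api/1/click_maps/search",  # placeholder host + project id
                  headers={"Authorization": "Bearer <JWT>"},
                  json={"startTimestamp": 1674000000000,    # assumed schema fields
                        "endTimestamp": 1674604800000})
print(r.json())   # expected shape: {"data": ...}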

View file

@@ -1,8 +1,11 @@
from chalicelib.core import telemetry, unlock
from chalicelib.core import jobs
from chalicelib.core import weekly_report as weekly_report_script
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from decouple import config
from chalicelib.core import jobs
from chalicelib.core import telemetry, unlock
from chalicelib.core import weekly_report as weekly_report_script
async def run_scheduled_jobs() -> None:
jobs.execute_jobs()
@@ -26,11 +29,13 @@ cron_jobs = [
{"func": unlock_cron, "trigger": "cron", "hour": "*"},
]
SINGLE_CRONS = [{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
{"func": weekly_report, "trigger": "cron", "day_of_week": "mon", "hour": 5,
"misfire_grace_time": 60 * 60}
]
SINGLE_CRONS = [{"func": telemetry_cron, "trigger": CronTrigger(day_of_week="*"),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": run_scheduled_jobs, "trigger": IntervalTrigger(minutes=60),
"misfire_grace_time": 20, "max_instances": 1},
{"func": weekly_report, "trigger": CronTrigger(day_of_week="mon", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1}
]
if config("LOCAL_CRONS", default=False, cast=bool):
cron_jobs += SINGLE_CRONS
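Reviewer note: with explicit CronTrigger/IntervalTrigger objects, these job dicts can be handed to APScheduler unchanged; a minimal sketch of how such a list is typically registered (the scheduler wiring here is illustrative, not shown in this diff):
# Hedged sketch of registering the job dicts above with APScheduler.
from apscheduler.schedulers.asyncio import AsyncIOScheduler

scheduler = AsyncIOScheduler()
for job in cron_jobs:
    scheduler.add_job(**job)   # func, trigger, misfire_grace_time, max_instances
scheduler.start()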

View file

@@ -1,10 +1,12 @@
from apscheduler.triggers.interval import IntervalTrigger
from chalicelib.utils import events_queue
def pg_events_queue() -> None:
async def pg_events_queue() -> None:
events_queue.global_queue.force_flush()
ee_cron_jobs = [
{"func": pg_events_queue, "trigger": "interval", "seconds": 60*5, "misfire_grace_time": 20},
]
{"func": pg_events_queue, "trigger": IntervalTrigger(minutes=5), "misfire_grace_time": 20, "max_instances": 1},
]

View file

@@ -1,12 +1,12 @@
from chalicelib.core import roles, traces
from chalicelib.core import roles, traces, assist_records, sessions
from chalicelib.core import unlock, signals
from chalicelib.core import sessions_insights
from chalicelib.utils import assist_helper
unlock.check()
from or_dependencies import OR_context
from routers.base import get_routers
import schemas
import schemas_ee
from fastapi import Depends, Body
@@ -14,7 +14,7 @@ public_app, app, app_apikey = get_routers()
@app.get('/client/roles', tags=["client", "roles"])
def get_roles(context: schemas.CurrentContext = Depends(OR_context)):
def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
return {
'data': roles.get_roles(tenant_id=context.tenant_id)
}
@@ -22,7 +22,7 @@ def get_roles(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/client/roles', tags=["client", "roles"])
@app.put('/client/roles', tags=["client", "roles"])
def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)):
def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
if "errors" in data:
return data
@@ -35,7 +35,7 @@ def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas.Cu
@app.post('/client/roles/{roleId}', tags=["client", "roles"])
@app.put('/client/roles/{roleId}', tags=["client", "roles"])
def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, data=data)
if "errors" in data:
return data
@@ -46,7 +46,7 @@ def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
@app.delete('/client/roles/{roleId}', tags=["client", "roles"])
def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_context)):
def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId)
if "errors" in data:
return data
@@ -62,22 +62,73 @@ def get_assist_credentials():
@app.post('/trails', tags=["traces", "trails"])
def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {
'data': traces.get_all(tenant_id=context.tenant_id, data=data)
}
@app.post('/trails/actions', tags=["traces", "trails"])
def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)):
def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)):
return {'data': traces.get_available_actions(tenant_id=context.tenant_id)}
@app.put('/{projectId}/assist/save', tags=["assist"])
def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
return {"errors": ["Session not found"]}
return {"data": assist_records.presign_record(project_id=projectId, data=data, context=context)}
@app.put('/{projectId}/assist/save/done', tags=["assist"])
def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
return {"errors": ["Session not found"]}
return {"data": {"URL": assist_records.save_record(project_id=projectId, data=data, context=context)}}
@app.post('/{projectId}/assist/records', tags=["assist"])
def search_records(projectId: int, data: schemas_ee.AssistRecordSearchPayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {"data": assist_records.search_records(project_id=projectId, data=data, context=context)}
@app.get('/{projectId}/assist/records/{recordId}', tags=["assist"])
def get_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
return {"data": assist_records.get_record(project_id=projectId, record_id=recordId, context=context)}
@app.post('/{projectId}/assist/records/{recordId}', tags=["assist"])
def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUpdatePayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
result = assist_records.update_record(project_id=projectId, record_id=recordId, data=data, context=context)
if "errors" in result:
return result
return {"data": result}
@app.delete('/{projectId}/assist/records/{recordId}', tags=["assist"])
def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
result = assist_records.delete_record(project_id=projectId, record_id=recordId, context=context)
if "errors" in result:
return result
return {"data": result}
@app.post('/{projectId}/signals', tags=['signals'])
def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = signals.handle_frontend_signals_queued(project_id=projectId, user_id=context.user_id, data=data)
if "errors" in data:
return data
return {'data': data}
return {'data': data}
@app.post('/{projectId}/dashboard/insights', tags=["insights"])
def sessions_search(projectId: int, data: schemas_ee.GetInsightsSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {'data': sessions_insights.fetch_selected(data=data, project_id=projectId)}

View file

@@ -1,400 +0,0 @@
from fastapi import Body
import schemas
from chalicelib.core import metadata
from chalicelib.core import metrics
from chalicelib.utils import helper
from or_dependencies import OR_scope
from routers.base import get_routers
from schemas_ee import Permissions
public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)])
@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"])
def get_metadata_map(projectId: int):
metamap = []
for m in metadata.get(project_id=projectId):
metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
return {"data": metamap}
@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_application_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_page_metrics(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_user_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_performance(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_images(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_missing_resources_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_network(project_id=projectId, **data.dict())}
@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"])
def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None,
key: str = ""):
if q is None or len(q) == 0:
return {"data": []}
q = '^' + q
if widget in ['performance']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=True)
elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
'impacted_sessions_by_slow_pages', 'pages_response_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, pages_only=True)
elif widget in ['resources_loading_time']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False)
elif widget in ['time_between_events', 'events']:
data = metrics.search(q, type, project_id=projectId,
platform=platform, performance=False, events_only=True)
elif widget in ['metadata']:
data = metrics.search(q, None, project_id=projectId,
platform=platform, metadata=True, key=key)
else:
return {"errors": [f"unsupported widget: {widget}"]}
return {'data': data}
# 1
@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_resources(project_id=projectId, **data.dict())}
# 2
@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_loading_time(project_id=projectId, **data.dict())}
# 3
@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())}
# 4
@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_busiest_time_of_day(project_id=projectId, **data.dict())}
# 5
@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_location(project_id=projectId, **data.dict())}
# 6
@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_speed_index_location(project_id=projectId, **data.dict())}
# 7
@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())}
# 8
@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_pages_response_time_distribution(project_id=projectId, **data.dict())}
# 9
@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_top_metrics(project_id=projectId, **data.dict())}
# 10
@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_time_to_render(project_id=projectId, **data.dict())}
# 11
@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())}
# 12
@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())}
# 12.1
@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
# 12.2
@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())}
# 13
@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_crashes(project_id=projectId, **data.dict())}
# 14
@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors(project_id=projectId, **data.dict())}
# 14.1
@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_4xx(project_id=projectId, **data.dict())}
# 14.2
@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_domains_errors_5xx(project_id=projectId, **data.dict())}
# 15
@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_slowest_domains(project_id=projectId, **data.dict())}
# 16
@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_domains(project_id=projectId, **data.dict())}
# 17
@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_sessions_per_browser(project_id=projectId, **data.dict())}
# 18
@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors(project_id=projectId, **data.dict())}
# 18.1
@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_4xx(project_id=projectId, **data.dict())}
# 18.2
@app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_calls_errors_5xx(project_id=projectId, **data.dict())}
# 19
@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_errors_per_type(project_id=projectId, **data.dict())}
# 20
@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_by_party(project_id=projectId, **data.dict())}
# 21
@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.resource_type_vs_response_end(project_id=projectId, **data.dict())}
# 22
@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
# 23
@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
# 24
@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": metrics.get_resources_count_by_type(project_id=projectId, **data.dict())}
# # 25
# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
# return {"errors": ["please choose 2 events"]}
@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": "count_sessions",
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
*helper.explode_widget(data={**metrics.get_application_activity(project_id=projectId, **data.dict()),
"chart": metrics.get_performance(project_id=projectId, **data.dict())
.get("chart", [])}),
*helper.explode_widget(data=metrics.get_page_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=metrics.get_user_activity(project_id=projectId, **data.dict())),
{"key": "avg_pages_dom_buildtime",
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": "avg_pages_response_time",
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())
},
*helper.explode_widget(metrics.get_top_metrics(project_id=projectId, **data.dict())),
{"key": "avg_time_to_render", "data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": "avg_used_js_heap_size", "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": "avg_cpu", "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}
@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
results = [
{"key": schemas.TemplatePredefinedKeys.count_sessions,
"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_image_load_time,
"data": metrics.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_page_load_time,
"data": metrics.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_request_load_time,
"data": metrics.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start,
"data": metrics.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel,
"data": metrics.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_visited_pages,
"data": metrics.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_session_duration,
"data": metrics.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime,
"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_pages_response_time,
"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_response_time,
"data": metrics.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_first_paint,
"data": metrics.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded,
"data": metrics.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_till_first_bit,
"data": metrics.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive,
"data": metrics.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.count_requests,
"data": metrics.get_top_metrics_count_requests(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_time_to_render,
"data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size,
"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_cpu,
"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
{"key": schemas.TemplatePredefinedKeys.avg_fps,
"data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
]
results = sorted(results, key=lambda r: r["key"])
return {"data": results}

View file

@ -1,6 +1,9 @@
from typing import Union
from fastapi import Body, Depends
import schemas
import schemas_ee
from chalicelib.core import dashboards, custom_metrics, funnels
from or_dependencies import OR_context, OR_scope
from routers.base import get_routers
@ -47,11 +50,12 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
@app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
def add_widget_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def add_card_to_dashboard(projectId: int, dashboardId: int,
data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
data=data)}
@ -59,7 +63,7 @@ def add_widget_to_dashboard(projectId: int, dashboardId: int,
@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
data: schemas.CreateCustomMetricsSchema = Body(...),
data: schemas_ee.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
dashboard_id=dashboardId, data=data)}
@ -81,43 +85,41 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
widget_id=widgetId)
@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId, data=data)
if data is None:
return {"errors": ["widget not found"]}
return {"data": data}
@app.get('/{projectId}/metrics/templates', tags=["dashboard"])
def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": dashboards.get_templates(project_id=projectId, user_id=context.user_id)}
# @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
# def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
# data: schemas.CardChartSchema = Body(...),
# context: schemas.CurrentContext = Depends(OR_context)):
# data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
# widget_id=widgetId, data=data)
# if data is None:
# return {"errors": ["widget not found"]}
# return {"data": data}
@app.post('/{projectId}/cards/try', tags=["cards"])
@app.post('/{projectId}/metrics/try', tags=["dashboard"])
@app.put('/{projectId}/metrics/try', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card(projectId: int, data: schemas_ee.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
@app.post('/{projectId}/cards/try/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
def try_custom_metric_sessions(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
return {"data": data}
@app.post('/{projectId}/cards/try/issues', tags=["cards"])
@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.series) == 0:
return {"data": []}
data.series[0].filter.startDate = data.startTimestamp
@ -126,46 +128,72 @@ def try_custom_metric_funnel_issues(projectId: int, data: schemas.CustomMetricSe
return {"data": data}
@app.get('/{projectId}/cards', tags=["cards"])
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards', tags=["cards"])
@app.post('/{projectId}/metrics', tags=["dashboard"])
@app.put('/{projectId}/metrics', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def create_card(projectId: int, data: schemas_ee.CreateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/metrics', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
@app.post('/{projectId}/cards/search', tags=["cards"])
@app.post('/{projectId}/metrics/search', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
@app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def get_custom_metric(projectId: int, metric_id: str, context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": ["invalid card_id"]}
data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
if data is None:
return {"errors": ["custom metric not found"]}
return {"errors": ["card not found"]}
return {"data": data}
# @app.get('/{projectId}/cards/{metric_id}/thumbnail', tags=["cards"])
# def sign_thumbnail_for_upload(projectId: int, metric_id: Union[int, str],
# context: schemas.CurrentContext = Depends(OR_context)):
# if not isinstance(metric_id, int):
# return {"errors": ["invalid card_id"]}
# return custom_metrics.add_thumbnail(metric_id=metric_id, user_id=context.user_id, project_id=projectId)
@app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
def get_custom_metric_sessions(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_sessions(projectId: int, metric_id: int,
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not isinstance(metric_id, int):
return {"errors": [f"invalid card_id: {metric_id}"]}
data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
@ -173,10 +201,11 @@ def get_custom_metric_funnel_issues(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
metric_id=metric_id, issue_id=issueId, data=data)
@ -185,10 +214,11 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
def get_custom_metric_errors_list(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
data: schemas.CardSessionsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
@ -197,22 +227,22 @@ def get_custom_metric_errors_list(projectId: int, metric_id: int,
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = dashboards.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
def get_card_chart(projectId: int, metric_id: int, data: schemas.CardChartSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
def update_custom_metric(projectId: int, metric_id: int, data: schemas_ee.UpdateCardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
@ -220,6 +250,7 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCus
return {"data": data}
@app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
@ -232,6 +263,7 @@ def update_custom_metric_state(projectId: int, metric_id: int,
status=data.active)}
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):

View file

@ -1,10 +1,11 @@
from typing import Optional, List, Literal
from enum import Enum
from typing import Optional, List, Union, Literal
from pydantic import BaseModel, Field, EmailStr
from pydantic import root_validator
import schemas
from chalicelib.utils.TimeUTC import TimeUTC
from enum import Enum
class Permissions(str, Enum):
@ -39,6 +40,23 @@ class SignalsSchema(BaseModel):
data: dict = Field(default={})
class InsightCategories(str, Enum):
errors = "errors"
network = "network"
rage = "rage"
resources = "resources"
class GetInsightsSchema(BaseModel):
startTimestamp: int = Field(default=TimeUTC.now(-7))
endTimestamp: int = Field(default=TimeUTC.now())
metricValue: List[InsightCategories] = Field(default=[])
series: List[schemas.CardCreateSeriesSchema] = Field(default=[])
class Config:
alias_generator = schemas.attribute_to_camel_case
class CreateMemberSchema(schemas.CreateMemberSchema):
roleId: Optional[int] = Field(None)
@ -89,3 +107,67 @@ class SessionModel(BaseModel):
userDeviceType: str
userAnonymousId: Optional[str]
metadata: dict = Field(default={})
class AssistRecordUpdatePayloadSchema(BaseModel):
name: str = Field(..., min_length=1)
class AssistRecordPayloadSchema(AssistRecordUpdatePayloadSchema):
duration: int = Field(...)
session_id: int = Field(...)
class Config:
alias_generator = schemas.attribute_to_camel_case
class AssistRecordSavePayloadSchema(AssistRecordPayloadSchema):
key: str = Field(...)
class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema):
limit: int = Field(default=200, gt=0)
startDate: int = Field(default=TimeUTC.now(-7))
endDate: int = Field(default=TimeUTC.now(1))
user_id: Optional[int] = Field(default=None)
query: Optional[str] = Field(default=None)
order: Literal["asc", "desc"] = Field(default="desc")
class Config:
alias_generator = schemas.attribute_to_camel_case
# TODO: move these to schema when Insights is supported on PG
class MetricOfInsights(str, Enum):
issue_categories = "issueCategories"
class CreateCardSchema(schemas.CreateCardSchema):
metric_of: Union[schemas.MetricOfTimeseries, schemas.MetricOfTable,
schemas.MetricOfErrors, schemas.MetricOfPerformance,
schemas.MetricOfResources, schemas.MetricOfWebVitals,
schemas.MetricOfClickMap, MetricOfInsights] = Field(default=schemas.MetricOfTable.user_id)
metric_value: List[Union[schemas.IssueType, InsightCategories]] = Field(default=[])
@root_validator
def restrictions(cls, values):
return values
@root_validator
def validator(cls, values):
values = super().validator(values)
if values.get("metric_type") == schemas.MetricType.insights:
assert values.get("view_type") == schemas.MetricOtherViewType.list_chart, \
f"viewType must be 'list' for metricOf:{values.get('metric_of')}"
assert isinstance(values.get("metric_of"), MetricOfInsights), \
f"metricOf must be of type {MetricOfInsights} for metricType:{schemas.MetricType.insights}"
if values.get("metric_value") is not None and len(values.get("metric_value")) > 0:
for i in values.get("metric_value"):
assert isinstance(i, InsightCategories), \
f"metricValue should be of type [InsightCategories] for metricType:{schemas.MetricType.insights}"
return values
class UpdateCardSchema(CreateCardSchema):
series: List[schemas.CardUpdateSeriesSchema] = Field(...)
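The pair of root validators above relies on a pydantic v1 pattern: the subclass re-runs the parent's root validator via super() before layering on the insights-specific assertions. A minimal, self-contained sketch of that pattern (model and field values here are illustrative, not the real schemas):
from pydantic import BaseModel, root_validator  # pydantic v1, as used in this codebase
class BaseCard(BaseModel):
    metric_type: str = "timeseries"
    view_type: str = "lineChart"
    @root_validator
    def validator(cls, values):
        # parent-level checks would live here
        return values
class InsightsCard(BaseCard):
    @root_validator
    def validator(cls, values):
        values = super().validator(values)  # re-apply the parent checks first
        if values.get("metric_type") == "insights":
            assert values.get("view_type") == "list", \
                "viewType must be 'list' for insights cards"
        return values
InsightsCard(metric_type="insights", view_type="list")   # validates
# InsightsCard(metric_type="insights") raises a ValidationError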

View file

@ -0,0 +1,13 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.8.3-ee'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS public.webhooks
ALTER COLUMN type SET DEFAULT 'webhook';
ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
COMMIT;

View file

@ -0,0 +1,2 @@
ALTER TABLE experimental.sessions
MODIFY COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122, 'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126);
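Enum8 is backed by a signed 8-bit integer, so this country list can never exceed 256 entries (ClickHouse also rejects duplicate enum names, hence the single 'BU' above); the ALTER uses four of the remaining positive slots for the historical ISO codes BU, VD, YD and DD. A one-line sanity check of that capacity:
assert 127 - (-128) + 1 == 256  # Enum8 value range, hence at most 256 country codes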

View file

@ -129,7 +129,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122, 'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
datetime DateTime,
duration UInt32,
@ -212,7 +212,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
@ -285,7 +285,7 @@ WHERE datetime >= now() - INTERVAL 7 DAY;
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, type, session_id, message_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
@ -362,34 +362,34 @@ WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)
AND duration > 0;
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, error_id, session_id)
TTL _timestamp + INTERVAL 35 DAY
POPULATE
AS
SELECT session_id,
project_id,
events.datetime AS datetime,
event_type,
assumeNotNull(error_id) AS error_id,
source,
name,
message,
error_tags_keys,
error_tags_values,
message_id,
user_id,
user_browser,
user_browser_version,
user_os,
user_os_version,
user_device_type,
user_device,
user_country,
_timestamp
FROM experimental.events
INNER JOIN experimental.sessions USING (session_id)
WHERE event_type = 'ERROR'
AND source = 'js_exception';
-- CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv
-- ENGINE = ReplacingMergeTree(_timestamp)
-- PARTITION BY toYYYYMM(datetime)
-- ORDER BY (project_id, datetime, event_type, error_id, session_id)
-- TTL _timestamp + INTERVAL 35 DAY
-- POPULATE
-- AS
-- SELECT session_id,
-- project_id,
-- events.datetime AS datetime,
-- event_type,
-- assumeNotNull(error_id) AS error_id,
-- source,
-- name,
-- message,
-- error_tags_keys,
-- error_tags_values,
-- message_id,
-- user_id,
-- user_browser,
-- user_browser_version,
-- user_os,
-- user_os_version,
-- user_device_type,
-- user_device,
-- user_country,
-- _timestamp
-- FROM experimental.events
-- INNER JOIN experimental.sessions USING (session_id)
-- WHERE event_type = 'ERROR'
-- AND source = 'js_exception';

View file

@ -7,14 +7,327 @@ $$ LANGUAGE sql IMMUTABLE;
CREATE TABLE IF NOT EXISTS frontend_signals
(
project_id bigint NOT NULL,
user_id integer NOT NULL references users (user_id) ON DELETE CASCADE,
timestamp bigint NOT NULL,
action text NOT NULL,
source text NOT NULL,
category text NOT NULL,
data jsonb
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
timestamp bigint NOT NULL,
action text NOT NULL,
source text NOT NULL,
category text NOT NULL,
data jsonb,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL
);
CREATE INDEX IF NOT EXISTS frontend_signals_user_id_idx ON frontend_signals (user_id);
CREATE TABLE IF NOT EXISTS assist_records
(
record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE SET NULL,
created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
deleted_at timestamp without time zone NULL DEFAULT NULL,
name text NOT NULL,
file_key text NOT NULL,
duration integer NOT NULL
);
ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
UPDATE metrics
SET is_public = TRUE;
ALTER TABLE IF EXISTS metrics
ALTER COLUMN metric_type TYPE text,
ALTER COLUMN metric_type SET DEFAULT 'timeseries',
ALTER COLUMN view_type TYPE text,
ALTER COLUMN view_type SET DEFAULT 'lineChart',
ADD COLUMN IF NOT EXISTS thumbnail text;
DO
$$
BEGIN
IF EXISTS(SELECT column_name
FROM information_schema.columns
WHERE table_name = 'metrics'
and column_name = 'is_predefined') THEN
-- 1. pre transform structure
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS o_metric_id INTEGER,
ADD COLUMN IF NOT EXISTS o_widget_id INTEGER;
-- 2. insert predefined metrics related to dashboards as custom metrics
INSERT INTO metrics(project_id, user_id, name, metric_type, view_type, metric_of, metric_value,
metric_format, default_config, is_public, o_metric_id, o_widget_id)
SELECT dashboards.project_id,
dashboard_widgets.user_id,
metrics.name,
left(category, 1) || right(replace(initcap(category), ' ', ''), -1) AS metric_type,
'chart' AS view_type,
left(predefined_key, 1) || right(replace(initcap(predefined_key), '_', ''), -1) AS metric_of,
metric_value,
metric_format,
default_config,
TRUE AS is_public,
metrics.metric_id,
dashboard_widgets.widget_id
FROM metrics
INNER JOIN dashboard_widgets USING (metric_id)
INNER JOIN dashboards USING (dashboard_id)
WHERE is_predefined;
-- 3. update widgets
UPDATE dashboard_widgets
SET metric_id=metrics.metric_id
FROM metrics
WHERE metrics.o_widget_id IS NOT NULL
AND dashboard_widgets.widget_id = metrics.o_widget_id;
-- 4. delete predefined metrics
DELETE
FROM metrics
WHERE is_predefined;
ALTER TABLE IF EXISTS metrics
DROP COLUMN IF EXISTS active,
DROP COLUMN IF EXISTS is_predefined,
DROP COLUMN IF EXISTS is_template,
DROP COLUMN IF EXISTS category,
DROP COLUMN IF EXISTS o_metric_id,
DROP COLUMN IF EXISTS o_widget_id,
DROP CONSTRAINT IF EXISTS null_project_id_for_template_only,
DROP CONSTRAINT IF EXISTS metrics_unique_key,
DROP CONSTRAINT IF EXISTS unique_key;
END IF;
END;
$$
LANGUAGE plpgsql;
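A hedged Python mirror of the key-rewriting expressions in step 2 above, to make the intended outputs concrete (the helper name is illustrative; the SQL applies the same idea with '_' as the separator for predefined_key):
def sql_style_camel(text: str, sep: str = " ") -> str:
    # initcap(text): capitalize each word; replace(sep, ''): glue the words;
    # right(..., -1): drop the first char; left(text, 1): keep the original
    # lowercase first char.
    initcapped = sep.join(w.capitalize() for w in text.split(sep))
    return text[:1] + initcapped.replace(sep, "")[1:]
assert sql_style_camel("web vitals") == "webVitals"                # category -> metric_type
assert sql_style_camel("count_sessions", "_") == "countSessions"   # predefined_key -> metric_of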
DROP TYPE IF EXISTS metric_type;
DROP TYPE IF EXISTS metric_view_type;
ALTER TABLE IF EXISTS events.clicks
ADD COLUMN IF NOT EXISTS path text;
DROP INDEX IF EXISTS events.clicks_url_gin_idx;
DROP INDEX IF EXISTS events.inputs_label_value_idx;
DROP INDEX IF EXISTS events.inputs_label_idx;
DROP INDEX IF EXISTS events.pages_base_path_idx;
DROP INDEX IF EXISTS events.pages_base_path_idx1;
DROP INDEX IF EXISTS events.pages_base_path_idx2;
DROP INDEX IF EXISTS events.pages_base_referrer_gin_idx1;
DROP INDEX IF EXISTS events.pages_base_referrer_gin_idx2;
DROP INDEX IF EXISTS events.resources_url_gin_idx;
DROP INDEX IF EXISTS events.resources_url_idx;
DROP INDEX IF EXISTS events.resources_url_hostpath_idx;
DROP INDEX IF EXISTS events.resources_session_id_timestamp_idx;
DROP INDEX IF EXISTS events.resources_duration_durationgt0_idx;
DROP INDEX IF EXISTS events.state_actions_name_idx;
DROP INDEX IF EXISTS events_common.requests_query_nn_idx;
DROP INDEX IF EXISTS events_common.requests_host_nn_idx;
DROP INDEX IF EXISTS events_common.issues_context_string_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_country_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_browser_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_os_gin_idx;
DROP INDEX IF EXISTS public.issues_context_string_gin_idx;
ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;
-- To migrate saved search data
-- SET client_min_messages TO NOTICE;
CREATE OR REPLACE FUNCTION get_new_event_key(key text)
RETURNS text AS
$$
DECLARE
events_map CONSTANT JSONB := '{
"CLICK": "click",
"INPUT": "input",
"LOCATION": "location",
"CUSTOM": "custom",
"REQUEST": "request",
"FETCH": "fetch",
"GRAPHQL": "graphql",
"STATEACTION": "stateAction",
"ERROR": "error",
"CLICK_IOS": "clickIos",
"INPUT_IOS": "inputIos",
"VIEW_IOS": "viewIos",
"CUSTOM_IOS": "customIos",
"REQUEST_IOS": "requestIos",
"ERROR_IOS": "errorIos",
"DOM_COMPLETE": "domComplete",
"LARGEST_CONTENTFUL_PAINT_TIME": "largestContentfulPaintTime",
"TIME_BETWEEN_EVENTS": "timeBetweenEvents",
"TTFB": "ttfb",
"AVG_CPU_LOAD": "avgCpuLoad",
"AVG_MEMORY_USAGE": "avgMemoryUsage",
"FETCH_FAILED": "fetchFailed"
}';
BEGIN
RETURN jsonb_extract_path(events_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
CREATE OR REPLACE FUNCTION get_new_event_filter_key(key text)
RETURNS text AS
$$
DECLARE
event_filters_map CONSTANT JSONB := '{
"FETCH_URL": "fetchUrl",
"FETCH_STATUS_CODE": "fetchStatusCode",
"FETCH_METHOD": "fetchMethod",
"FETCH_DURATION": "fetchDuration",
"FETCH_REQUEST_BODY": "fetchRequestBody",
"FETCH_RESPONSE_BODY": "fetchResponseBody",
"GRAPHQL_NAME": "graphqlName",
"GRAPHQL_METHOD": "graphqlMethod",
"GRAPHQL_REQUEST_BODY": "graphqlRequestBody",
"GRAPHQL_RESPONSE_BODY": "graphqlResponseBody"
}';
BEGIN
RETURN jsonb_extract_path(event_filters_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
CREATE OR REPLACE FUNCTION get_new_filter_key(key text)
RETURNS text AS
$$
DECLARE
filters_map CONSTANT JSONB := '{
"USEROS": "userOs",
"USERBROWSER": "userBrowser",
"USERDEVICE": "userDevice",
"USERCOUNTRY": "userCountry",
"USERID": "userId",
"USERANONYMOUSID": "userAnonymousId",
"REFERRER": "referrer",
"REVID": "revId",
"USEROS_IOS": "userOsIos",
"USERDEVICE_IOS": "userDeviceIos",
"USERCOUNTRY_IOS": "userCountryIos",
"USERID_IOS": "userIdIos",
"USERANONYMOUSID_IOS": "userAnonymousIdIos",
"REVID_IOS": "revIdIos",
"DURATION": "duration",
"PLATFORM": "platform",
"METADATA": "metadata",
"ISSUE": "issue",
"EVENTS_COUNT": "eventsCount",
"UTM_SOURCE": "utmSource",
"UTM_MEDIUM": "utmMedium",
"UTM_CAMPAIGN": "utmCampaign"
}';
BEGIN
RETURN jsonb_extract_path(filters_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
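One detail worth noting before the DO block below: jsonb_extract_path() yields a JSON value, so casting it to text keeps the JSON quoting, which is why the block strips double quotes from every mapped key. A small Python mirror of that behavior (the map is an excerpt):
import json
EVENTS_MAP = {"CLICK": "click", "INPUT": "input", "LOCATION": "location"}  # excerpt
def get_new_event_key(key: str):
    # Mirrors jsonb_extract_path(events_map, key) cast to text: a quoted JSON
    # string for known keys, None (SQL NULL) otherwise.
    return json.dumps(EVENTS_MAP[key]) if key in EVENTS_MAP else None
value = get_new_event_key("CLICK")         # '"click"' - note the embedded quotes
assert value.replace('"', "") == "click"   # same cleanup as replace(new_value, '"', '')
assert get_new_event_key("UNKNOWN") is None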
DO
$$
DECLARE
row RECORD;
events_att JSONB;
event_filters_att JSONB;
filters_att JSONB;
element JSONB;
s_element JSONB;
new_value TEXT;
new_events JSONB[];
new_filters JSONB[];
new_event_filters JSONB[];
changed BOOLEAN;
planned_update JSONB[];
BEGIN
planned_update := '{}'::jsonb[];
FOR row IN SELECT * FROM searches
LOOP
-- Transform events attributes
events_att := row.filter -> 'events';
IF events_att IS NOT NULL THEN
new_events := '{}'::jsonb[];
FOR element IN SELECT jsonb_array_elements(events_att)
LOOP
changed := FALSE;
new_value := get_new_event_key(element ->> 'type');
IF new_value IS NOT NULL THEN
changed := TRUE;
new_value := replace(new_value, '"', '');
element := element || jsonb_build_object('type', new_value);
END IF;
-- Transform event's sub-filters attributes
event_filters_att := element -> 'filters';
new_event_filters := '{}'::jsonb[];
IF event_filters_att IS NOT NULL AND jsonb_array_length(event_filters_att) > 0 THEN
FOR s_element IN SELECT jsonb_array_elements(event_filters_att)
LOOP
new_value := get_new_event_filter_key(s_element ->> 'type');
IF new_value IS NOT NULL THEN
changed := TRUE;
new_value := replace(new_value, '"', '');
s_element := s_element || jsonb_build_object('type', new_value);
new_event_filters := array_append(new_event_filters, s_element);
END IF;
END LOOP;
element := element || jsonb_build_object('filters', new_event_filters);
END IF;
IF changed THEN
new_events := array_append(new_events, element);
END IF;
END LOOP;
IF array_length(new_events, 1) > 0 THEN
row.filter := row.filter || jsonb_build_object('events', new_events);
END IF;
END IF;
-- Transform filters attributes
filters_att := row.filter -> 'filters';
IF filters_att IS NOT NULL THEN
new_filters := '{}'::jsonb[];
FOR element IN SELECT jsonb_array_elements(filters_att)
LOOP
new_value := get_new_filter_key(element ->> 'type');
IF new_value IS NOT NULL THEN
new_value := replace(new_value, '"', '');
element := element || jsonb_build_object('type', new_value);
new_filters := array_append(new_filters, element);
END IF;
END LOOP;
IF array_length(new_filters, 1) > 0 THEN
row.filter := row.filter || jsonb_build_object('filters', new_filters);
END IF;
END IF;
IF array_length(new_events, 1) > 0 OR array_length(new_filters, 1) > 0 THEN
planned_update := array_append(planned_update,
jsonb_build_object('id', row.search_id, 'change', row.filter));
END IF;
END LOOP;
-- Update saved search
IF array_length(planned_update, 1) > 0 THEN
raise notice 'must update % elements', array_length(planned_update, 1);
UPDATE searches
SET filter=changes.change -> 'change'
FROM (SELECT unnest(planned_update)) AS changes(change)
WHERE search_id = (changes.change -> 'id')::integer;
raise notice 'update done';
ELSE
raise notice 'nothing to update';
END IF;
END ;
$$
LANGUAGE plpgsql;
COMMIT;
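For reference, a hedged sketch of what the migration above does to one saved search (keys taken from the mapping tables; real filters carry more fields, which pass through untouched):
before = {
    "events": [{"type": "CLICK", "filters": [{"type": "FETCH_URL", "value": ["/cart"]}]}],
    "filters": [{"type": "USERCOUNTRY", "value": ["FR"]}],
}
after = {
    "events": [{"type": "click", "filters": [{"type": "fetchUrl", "value": ["/cart"]}]}],
    "filters": [{"type": "userCountry", "value": ["FR"]}],
}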
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_idx ON events.clicks (path);
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_gin_idx ON events.clicks USING GIN (path gin_trgm_ops);
CREATE INDEX CONCURRENTLY IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);

View file

@ -77,6 +77,13 @@ DROP INDEX IF EXISTS events_common.requests_url_gin_idx2;
DROP INDEX IF EXISTS events.resources_url_gin_idx;
DROP INDEX IF EXISTS events.resources_url_idx;
UPDATE metrics
SET default_config=default_config || '{
"col": 4
}'::jsonb
WHERE NOT is_predefined
AND (metric_type = 'funnel' OR (metric_type = 'table' AND metric_of IN ('SESSIONS', 'js_exception')));
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;
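PostgreSQL's jsonb || operator does a shallow merge in which right-hand keys win, so the UPDATE above only widens "col" and keeps the other layout keys. A quick Python equivalent:
default_config = {"col": 2, "row": 2, "position": 0}
merged = {**default_config, "col": 4}  # default_config || '{"col": 4}'::jsonb
assert merged == {"col": 4, "row": 2, "position": 0}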

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.9.0-ee'
SELECT 'v1.10.0-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -131,7 +131,8 @@ $$
('user_viewed_sessions'),
('users'),
('webhooks'),
('sessions_notes'))
('sessions_notes'),
('assist_records'))
select bool_and(exists(select *
from information_schema.tables t
where table_schema = 'public'
@ -146,7 +147,7 @@ $$
tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
tenant_key text NOT NULL DEFAULT generate_api_key(20),
name text NOT NULL,
api_key text UNIQUE default generate_api_key(20) not null,
api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
license text NULL,
@ -186,9 +187,9 @@ $$
email text NOT NULL UNIQUE,
role user_role NOT NULL DEFAULT 'member',
name text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
api_key text UNIQUE default generate_api_key(20) not null,
api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
jwt_iat timestamp without time zone NULL DEFAULT NULL,
data jsonb NOT NULL DEFAULT '{}'::jsonb,
weekly_report boolean NOT NULL DEFAULT TRUE,
@ -256,7 +257,8 @@ $$
"defaultInputMode": "plain"
}'::jsonb,
first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL,
beacon_size integer NOT NULL DEFAULT 0
);
@ -283,25 +285,25 @@ $$
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'webhook_type') THEN
create type webhook_type as enum ('webhook','slack','email');
CREATE TYPE webhook_type AS ENUM ('webhook','slack','email','msteams');
END IF;
create table IF NOT EXISTS webhooks
CREATE TABLE IF NOT EXISTS webhooks
(
webhook_id integer generated by default as identity
webhook_id integer generated BY DEFAULT AS IDENTITY
constraint webhooks_pkey
primary key,
tenant_id integer not null
tenant_id integer NOT NULL
constraint webhooks_tenant_id_fkey
references tenants
on delete cascade,
endpoint text not null,
created_at timestamp default timezone('utc'::text, now()) not null,
endpoint text NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
auth_header text,
type webhook_type not null,
index integer default 0 not null,
type webhook_type NOT NULL DEFAULT 'webhook',
index integer DEFAULT 0 NOT NULL,
name varchar(100)
);
@ -339,9 +341,9 @@ $$
funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text not null,
filter jsonb not null,
created_at timestamp default timezone('utc'::text, now()) not null,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
@ -352,21 +354,21 @@ $$
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'announcement_type') THEN
create type announcement_type as enum ('notification','alert');
CREATE TYPE announcement_type AS ENUM ('notification','alert');
END IF;
create table IF NOT EXISTS announcements
CREATE TABLE IF NOT EXISTS announcements
(
announcement_id serial not null
announcement_id serial NOT NULL
constraint announcements_pk
primary key,
title text not null,
description text not null,
title text NOT NULL,
description text NOT NULL,
button_text varchar(30),
button_url text,
image_url text,
created_at timestamp default timezone('utc'::text, now()) not null,
type announcement_type default 'notification'::announcement_type not null
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
type announcement_type DEFAULT 'notification'::announcement_type NOT NULL
);
IF NOT EXISTS(SELECT *
@ -395,14 +397,14 @@ $$
CREATE TABLE IF NOT EXISTS jira_cloud
(
user_id integer not null
user_id integer NOT NULL
constraint jira_cloud_pk
primary key
constraint jira_cloud_users_fkey
references users
on delete cascade,
username text not null,
token text not null,
username text NOT NULL,
token text NOT NULL,
url text
);
@ -441,7 +443,6 @@ $$
context jsonb DEFAULT NULL
);
CREATE INDEX IF NOT EXISTS issues_issue_id_type_idx ON issues (issue_id, type);
CREATE INDEX IF NOT EXISTS issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
CREATE INDEX IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
CREATE INDEX IF NOT EXISTS issues_project_id_idx ON issues (project_id);
@ -591,12 +592,9 @@ $$
CREATE INDEX IF NOT EXISTS sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops);
CREATE INDEX IF NOT EXISTS sessions_user_country_gin_idx ON public.sessions (project_id, user_country);
CREATE INDEX IF NOT EXISTS sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0;
CREATE INDEX IF NOT EXISTS sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0;
CREATE INDEX IF NOT EXISTS sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0;
@ -648,13 +646,14 @@ $$
CREATE TABLE IF NOT EXISTS frontend_signals
(
project_id bigint NOT NULL,
user_id text NOT NULL,
timestamp bigint NOT NULL,
action text NOT NULL,
source text NOT NULL,
category text NOT NULL,
data json
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
timestamp bigint NOT NULL,
action text NOT NULL,
source text NOT NULL,
category text NOT NULL,
data jsonb,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL
);
CREATE INDEX IF NOT EXISTS frontend_signals_user_id_idx ON frontend_signals (user_id);
@ -665,8 +664,8 @@ $$
issue_id text NOT NULL,
provider oauth_provider NOT NULL,
created_by integer NOT NULL,
created_at timestamp default timezone('utc'::text, now()) NOT NULL,
provider_data jsonb default'{}'::jsonb NOT NULL
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
provider_data jsonb DEFAULT '{}'::jsonb NOT NULL
);
CREATE INDEX IF NOT EXISTS assigned_sessions_session_id_idx ON assigned_sessions (session_id);
@ -719,8 +718,8 @@ $$
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
action job_action NOT NULL,
reference_id text NOT NULL,
created_at timestamp default timezone('utc'::text, now()) NOT NULL,
updated_at timestamp default timezone('utc'::text, now()) NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at timestamp DEFAULT timezone('utc'::text, now()) NULL,
start_at timestamp NOT NULL,
errors text NULL
);
@ -748,37 +747,27 @@ $$
CREATE INDEX IF NOT EXISTS traces_created_at_idx ON traces (created_at);
CREATE INDEX IF NOT EXISTS traces_action_idx ON traces (action);
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined','funnel');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map');
CREATE TABLE IF NOT EXISTS metrics
(
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT FALSE,
active boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp,
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type metric_type NOT NULL DEFAULT 'timeseries',
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',
metric_value text[] NOT NULL DEFAULT '{}'::text[],
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type text NOT NULL DEFAULT 'timeseries',
view_type text NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',
metric_value text[] NOT NULL DEFAULT '{}'::text[],
metric_format text,
category text NULL DEFAULT 'custom',
is_pinned boolean NOT NULL DEFAULT FALSE,
is_predefined boolean NOT NULL DEFAULT FALSE,
is_template boolean NOT NULL DEFAULT FALSE,
predefined_key text NULL DEFAULT NULL,
default_config jsonb NOT NULL DEFAULT '{
thumbnail text,
default_config jsonb NOT NULL DEFAULT '{
"col": 2,
"row": 2,
"position": 0
}'::jsonb,
CONSTRAINT null_project_id_for_template_only
CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ),
CONSTRAINT unique_key UNIQUE (predefined_key)
}'::jsonb
);
CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
CREATE TABLE IF NOT EXISTS metric_series
@ -822,9 +811,9 @@ $$
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text not null,
filter jsonb not null,
created_at timestamp default timezone('utc'::text, now()) not null,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
@ -876,7 +865,7 @@ $$
(
note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
message text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
deleted_at timestamp without time zone NULL DEFAULT NULL,
tag text NULL,
@ -940,15 +929,6 @@ $$
CREATE INDEX IF NOT EXISTS pages_timestamp_idx ON events.pages (timestamp);
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
CREATE INDEX IF NOT EXISTS pages_base_referrer_idx ON events.pages (base_referrer);
CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
length(base_referrer) -
(CASE
WHEN base_referrer LIKE 'http://%'
THEN 7
WHEN base_referrer LIKE 'https://%'
THEN 8
ELSE 0 END))
gin_trgm_ops);
CREATE INDEX IF NOT EXISTS pages_response_time_idx ON events.pages (response_time);
CREATE INDEX IF NOT EXISTS pages_response_end_idx ON events.pages (response_end);
CREATE INDEX IF NOT EXISTS pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
@ -986,6 +966,7 @@ $$
timestamp bigint NOT NULL,
label text DEFAULT NULL,
url text DEFAULT '' NOT NULL,
path text,
selector text DEFAULT '' NOT NULL,
PRIMARY KEY (session_id, message_id)
);
@ -995,9 +976,11 @@ $$
CREATE INDEX IF NOT EXISTS clicks_timestamp_idx ON events.clicks (timestamp);
CREATE INDEX IF NOT EXISTS clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp);
CREATE INDEX IF NOT EXISTS clicks_url_idx ON events.clicks (url);
CREATE INDEX IF NOT EXISTS clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops);
CREATE INDEX IF NOT EXISTS clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector);
CREATE INDEX IF NOT EXISTS clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
CREATE INDEX IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);
CREATE INDEX IF NOT EXISTS clicks_path_idx ON events.clicks (path);
CREATE INDEX IF NOT EXISTS clicks_path_gin_idx ON events.clicks USING GIN (path gin_trgm_ops);
CREATE TABLE IF NOT EXISTS events.inputs
@ -1010,9 +993,7 @@ $$
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events.inputs (session_id);
CREATE INDEX IF NOT EXISTS inputs_label_value_idx ON events.inputs (label, value);
CREATE INDEX IF NOT EXISTS inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops);
CREATE INDEX IF NOT EXISTS inputs_label_idx ON events.inputs (label);
CREATE INDEX IF NOT EXISTS inputs_timestamp_idx ON events.inputs (timestamp);
CREATE INDEX IF NOT EXISTS inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp);
@ -1079,7 +1060,6 @@ $$
name text NOT NULL,
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX IF NOT EXISTS state_actions_name_idx ON events.state_actions (name);
CREATE INDEX IF NOT EXISTS state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
CREATE INDEX IF NOT EXISTS state_actions_timestamp_idx ON events.state_actions (timestamp);
@ -1115,17 +1095,12 @@ $$
CREATE INDEX IF NOT EXISTS resources_session_id_idx ON events.resources (session_id);
CREATE INDEX IF NOT EXISTS resources_status_idx ON events.resources (status);
CREATE INDEX IF NOT EXISTS resources_type_idx ON events.resources (type);
CREATE INDEX IF NOT EXISTS resources_duration_durationgt0_idx ON events.resources (duration) WHERE duration > 0;
CREATE INDEX IF NOT EXISTS resources_url_host_idx ON events.resources (url_host);
CREATE INDEX IF NOT EXISTS resources_timestamp_idx ON events.resources (timestamp);
CREATE INDEX IF NOT EXISTS resources_success_idx ON events.resources (success);
CREATE INDEX IF NOT EXISTS resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
CREATE INDEX IF NOT EXISTS resources_url_idx ON events.resources (url);
CREATE INDEX IF NOT EXISTS resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
CREATE INDEX IF NOT EXISTS resources_url_hostpath_idx ON events.resources (url_hostpath);
CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_idx ON events.resources (session_id, timestamp);
CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
@ -1242,257 +1217,26 @@ $$
CREATE INDEX IF NOT EXISTS requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE TABLE IF NOT EXISTS assist_records
(
record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE SET NULL,
created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
deleted_at timestamp without time zone NULL DEFAULT NULL,
name text NOT NULL,
file_key text NOT NULL,
duration integer NOT NULL
);
END IF;
END;
$$
LANGUAGE plpgsql;
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_fps', 'predefined', 'overview'),
('Sessions Affected by JS Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
('Top Domains with 4xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
('Top Domains with 5xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
('Errors per Domain', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'calls_errors', 'predefined', 'table'),
('Errors by Type', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
('Errors by Origin', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
('Speed Index by Location', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'speed_location', 'predefined', 'map'),
('Slowest Domains', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'slowest_domains', 'predefined', 'table'),
('Sessions per Browser', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
('Time To Render', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
('Sessions Impacted by Slow Pages', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
('Memory Consumption', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
('CPU Load', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'cpu', 'predefined', 'areaChart'),
('Frame Rate', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'fps', 'predefined', 'areaChart'),
('Crashes', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'crashes', 'predefined', 'areaChart'),
('Resources Loaded vs Visually Complete', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
('DOM Build Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
('Pages Response Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Missing Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'missing_resources', 'predefined', 'table'),
('Slowest Resources', 'resources', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'slowest_resources', 'predefined', 'table'),
('Resources Fetch Time', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
('Resource Loaded vs Response End', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
('Breakdown of Loaded Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
ON CONFLICT (predefined_key) DO UPDATE
SET name=excluded.name,
category=excluded.category,
default_config=excluded.default_config,
is_predefined=excluded.is_predefined,
is_template=excluded.is_template,
is_public=excluded.is_public,
metric_type=excluded.metric_type,
view_type=excluded.view_type;
COMMIT;
COMMIT;

View file

@ -9,14 +9,14 @@
"version": "1.0.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"@maxmind/geoip2-node": "^3.4.0",
"@socket.io/redis-adapter": "^7.2.0",
"express": "^4.18.1",
"jsonwebtoken": "^8.5.1",
"redis": "^4.2.0",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0"
"@maxmind/geoip2-node": "^3.5.0",
"@socket.io/redis-adapter": "^8.0.1",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"redis": "^4.5.1",
"socket.io": "^4.5.4",
"ua-parser-js": "^1.0.32",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.19.0"
}
},
"node_modules/@maxmind/geoip2-node": {
@ -88,14 +88,14 @@
"integrity": "sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg=="
},
"node_modules/@socket.io/redis-adapter": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-7.2.0.tgz",
"integrity": "sha512-/r6oF6Myz0K9uatB/pfCi0BhKg/KRMh1OokrqcjlNz6aq40WiXdFLRbHJQuwGHq/KvB+D6141K+IynbVxZGvhw==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-8.0.1.tgz",
"integrity": "sha512-qjYRaf+Xp/OEkQDqWwEXnCjcuBUtVCrV1loXTIqICF4D+HHAv4T4jgPHUagyzJQ9M/RmJL25GlG15wLjV2O37g==",
"dependencies": {
"debug": "~4.3.1",
"notepack.io": "~2.2.0",
"socket.io-adapter": "^2.4.0",
"uid2": "0.0.3"
"notepack.io": "~3.0.1",
"socket.io-adapter": "~2.4.0",
"uid2": "1.0.0"
},
"engines": {
"node": ">=10.0.0"
@ -107,14 +107,17 @@
"integrity": "sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q=="
},
"node_modules/@types/cors": {
"version": "2.8.12",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz",
"integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw=="
"version": "2.8.13",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.13.tgz",
"integrity": "sha512-RG8AStHlUiV5ysZQKq97copd2UmVYw3/pRMLefISZ3S1hK104Cwm7iLQ3fTKx+lsUH2CE8FlLaYeEA2LSeqYUA==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/node": {
"version": "18.11.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz",
"integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg=="
"version": "18.11.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.18.tgz",
"integrity": "sha512-DHQpWGjyQKSHj3ebjFI/wRKcqQcdR+MoFBygntYOZytCqNfkd2ZC4ARDJ2DQqhjH5p85Nnd3jhUJIXrszFX/JA=="
},
"node_modules/accepts": {
"version": "1.3.8",
@ -370,9 +373,9 @@
}
},
"node_modules/engine.io-parser": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.4.tgz",
"integrity": "sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==",
"version": "5.0.6",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.6.tgz",
"integrity": "sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==",
"engines": {
"node": ">=10.0.0"
}
@ -520,9 +523,9 @@
}
},
"node_modules/get-intrinsic": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz",
"integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
@ -608,24 +611,18 @@
"integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="
},
"node_modules/jsonwebtoken": {
"version": "8.5.1",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
"integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==",
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz",
"integrity": "sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==",
"dependencies": {
"jws": "^3.2.2",
"lodash.includes": "^4.3.0",
"lodash.isboolean": "^3.0.3",
"lodash.isinteger": "^4.0.4",
"lodash.isnumber": "^3.0.3",
"lodash.isplainobject": "^4.0.6",
"lodash.isstring": "^4.0.1",
"lodash.once": "^4.0.0",
"lodash": "^4.17.21",
"ms": "^2.1.1",
"semver": "^5.6.0"
"semver": "^7.3.8"
},
"engines": {
"node": ">=4",
"npm": ">=1.4.28"
"node": ">=12",
"npm": ">=6"
}
},
"node_modules/jsprim": {
@ -661,40 +658,21 @@
"safe-buffer": "^5.0.1"
}
},
"node_modules/lodash.includes": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/lodash.isboolean": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
"integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="
},
"node_modules/lodash.isinteger": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz",
"integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="
},
"node_modules/lodash.isnumber": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz",
"integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
"integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="
},
"node_modules/lodash.isstring": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz",
"integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="
},
"node_modules/lodash.once": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
"integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/map-obj": {
"version": "4.3.0",
@ -794,9 +772,9 @@
}
},
"node_modules/notepack.io": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/notepack.io/-/notepack.io-2.2.0.tgz",
"integrity": "sha512-9b5w3t5VSH6ZPosoYnyDONnUTF8o0UkBw7JLA6eBlYJWyGT1Q3vQa8Hmuj1/X6RYvHjjygBDgw6fJhe0JEojfw=="
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/notepack.io/-/notepack.io-3.0.1.tgz",
"integrity": "sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg=="
},
"node_modules/object-assign": {
"version": "4.1.1",
@ -807,9 +785,9 @@
}
},
"node_modules/object-inspect": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
"integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@ -935,11 +913,17 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver"
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/send": {
@ -1037,9 +1021,9 @@
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
},
"node_modules/socket.io-parser": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.1.tgz",
"integrity": "sha512-V4GrkLy+HeF1F/en3SpUaM+7XxYXpuMUWLGde1kSSh5nQMN4hLrbPIkD+otwh6q9R6NOQBN4AMaOZ2zVjui82g==",
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.2.tgz",
"integrity": "sha512-DJtziuKypFkMMHCm2uIshOYC7QaylbtzQwiMYDuCKy3OPkjLzu4B2vAhTlqipRHHzrI0NJeBAizTK7X+6m1jVw==",
"dependencies": {
"@socket.io/component-emitter": "~3.1.0",
"debug": "~4.3.1"
@ -1114,9 +1098,12 @@
}
},
"node_modules/uid2": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz",
"integrity": "sha512-5gSP1liv10Gjp8cMEnFd6shzkL/D6W1uhXSFNCxDC+YI8+L8wkCYCbJ7n77Ezb4wE/xzMogecE+DtamEe9PZjg=="
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/uid2/-/uid2-1.0.0.tgz",
"integrity": "sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ==",
"engines": {
"node": ">= 4.0.0"
}
},
"node_modules/unpipe": {
"version": "1.0.0",
@ -1135,8 +1122,8 @@
}
},
"node_modules/uWebSockets.js": {
"version": "20.10.0",
"resolved": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#806df48c9da86af7b3341f3e443388c7cd15c3de"
"version": "20.19.0",
"resolved": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#42c9c0d5d31f46ca4115dc75672b0037ec970f28"
},
"node_modules/vary": {
"version": "1.1.2",

View file

@ -18,13 +18,13 @@
},
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"@maxmind/geoip2-node": "^3.4.0",
"@socket.io/redis-adapter": "^7.2.0",
"express": "^4.18.1",
"jsonwebtoken": "^8.5.1",
"redis": "^4.2.0",
"socket.io": "^4.5.1",
"ua-parser-js": "^1.0.2",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.10.0"
"@maxmind/geoip2-node": "^3.5.0",
"@socket.io/redis-adapter": "^8.0.1",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"redis": "^4.5.1",
"socket.io": "^4.5.4",
"ua-parser-js": "^1.0.32",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.19.0"
}
}

View file

@ -9,7 +9,7 @@
"version": "1.0.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"express": "^4.18.1",
"express": "^4.18.2",
"peer": "^v1.0.0-rc.4"
}
},

View file

@ -18,7 +18,7 @@
},
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"express": "^4.18.1",
"express": "^4.18.2",
"peer": "^v1.0.0-rc.4"
}
}

View file

@ -0,0 +1,13 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.8.3'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS public.webhooks
ALTER COLUMN type SET DEFAULT 'webhook';
ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
COMMIT;
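Both changes are easy to sanity-check after running this migration (a sketch; the function and type are the ones defined above):

SELECT openreplay_version();                   -- v1.8.3
SELECT unnest(enum_range(NULL::webhook_type)); -- webhook, slack, email, msteams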

View file

@ -58,7 +58,7 @@ spec:
name: minio
port:
number: 9000
path: /(minio|mobs|sessions-assets|frontend|static|sourcemaps|ios-images)/
path: /(minio|mobs|sessions-assets|frontend|static|sourcemaps|ios-images|records)/
tls:
- hosts:
- {{ .Values.global.domainName }}

View file

@ -5,7 +5,7 @@ set -e
cd /tmp
buckets=("mobs" "sessions-assets" "sourcemaps" "sessions-mobile-assets" "quickwit" "vault-data")
buckets=("mobs" "sessions-assets" "static" "sourcemaps" "sessions-mobile-assets" "quickwit" "vault-data" "records")
mc alias set minio $MINIO_HOST $MINIO_ACCESS_KEY $MINIO_SECRET_KEY

View file

@ -0,0 +1,308 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.10.0'
$$ LANGUAGE sql IMMUTABLE;
ALTER TYPE webhook_type ADD VALUE IF NOT EXISTS 'msteams';
UPDATE metrics
SET is_public = TRUE;
ALTER TABLE IF EXISTS metrics
ALTER COLUMN metric_type TYPE text,
ALTER COLUMN metric_type SET DEFAULT 'timeseries',
ALTER COLUMN view_type TYPE text,
ALTER COLUMN view_type SET DEFAULT 'lineChart',
ADD COLUMN IF NOT EXISTS thumbnail text;
DO
$$
BEGIN
IF EXISTS(SELECT column_name
FROM information_schema.columns
WHERE table_name = 'metrics'
and column_name = 'is_predefined') THEN
-- 1. pre transform structure
ALTER TABLE IF EXISTS metrics
ADD COLUMN IF NOT EXISTS o_metric_id INTEGER,
ADD COLUMN IF NOT EXISTS o_widget_id INTEGER;
-- 2. insert predefined metrics related to dashboards as custom metrics
INSERT INTO metrics(project_id, user_id, name, metric_type, view_type, metric_of, metric_value,
metric_format, default_config, is_public, o_metric_id, o_widget_id)
SELECT dashboards.project_id,
dashboard_widgets.user_id,
metrics.name,
left(category, 1) || right(replace(initcap(category), ' ', ''), -1) AS metric_type,
'chart' AS view_type,
left(predefined_key, 1) || right(replace(initcap(predefined_key), '_', ''), -1) AS metric_of,
metric_value,
metric_format,
default_config,
TRUE AS is_public,
metrics.metric_id,
dashboard_widgets.widget_id
FROM metrics
INNER JOIN dashboard_widgets USING (metric_id)
INNER JOIN dashboards USING (dashboard_id)
WHERE is_predefined;
-- 3. update widgets
UPDATE dashboard_widgets
SET metric_id=metrics.metric_id
FROM metrics
WHERE metrics.o_widget_id IS NOT NULL
AND dashboard_widgets.widget_id = metrics.o_widget_id;
-- 4. delete predefined metrics
DELETE
FROM metrics
WHERE is_predefined;
ALTER TABLE IF EXISTS metrics
DROP COLUMN IF EXISTS active,
DROP COLUMN IF EXISTS is_predefined,
DROP COLUMN IF EXISTS predefined_key,
DROP COLUMN IF EXISTS is_template,
DROP COLUMN IF EXISTS category,
DROP COLUMN IF EXISTS o_metric_id,
DROP COLUMN IF EXISTS o_widget_id,
DROP CONSTRAINT IF EXISTS null_project_id_for_template_only,
DROP CONSTRAINT IF EXISTS metrics_unique_key,
DROP CONSTRAINT IF EXISTS unique_key;
END IF;
END;
$$
LANGUAGE plpgsql;
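The key-renaming expressions in step 2 are dense; as an illustration with sample values (not taken from real data), they camelCase the legacy identifiers:

SELECT left('web vitals', 1) || right(replace(initcap('web vitals'), ' ', ''), -1);         -- webVitals
SELECT left('count_sessions', 1) || right(replace(initcap('count_sessions'), '_', ''), -1); -- countSessions

so a predefined metric's category and predefined_key carry over as the metric_type and metric_of of the equivalent custom metric.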
DROP TYPE IF EXISTS metric_type;
DROP TYPE IF EXISTS metric_view_type;
ALTER TABLE IF EXISTS events.clicks
ADD COLUMN IF NOT EXISTS path text;
DROP INDEX IF EXISTS events.clicks_url_gin_idx;
DROP INDEX IF EXISTS events.inputs_label_value_idx;
DROP INDEX IF EXISTS events.inputs_label_idx;
DROP INDEX IF EXISTS events.pages_base_path_idx;
DROP INDEX IF EXISTS events.pages_base_path_idx1;
DROP INDEX IF EXISTS events.pages_base_path_idx2;
DROP INDEX IF EXISTS events.pages_base_referrer_gin_idx1;
DROP INDEX IF EXISTS events.pages_base_referrer_gin_idx2;
DROP INDEX IF EXISTS events.resources_url_gin_idx;
DROP INDEX IF EXISTS events.resources_url_idx;
DROP INDEX IF EXISTS events.resources_url_hostpath_idx;
DROP INDEX IF EXISTS events.resources_session_id_timestamp_idx;
DROP INDEX IF EXISTS events.resources_duration_durationgt0_idx;
DROP INDEX IF EXISTS events.state_actions_name_idx;
DROP INDEX IF EXISTS events_common.requests_query_nn_idx;
DROP INDEX IF EXISTS events_common.requests_host_nn_idx;
DROP INDEX IF EXISTS events_common.issues_context_string_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_country_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_browser_gin_idx;
DROP INDEX IF EXISTS public.sessions_user_os_gin_idx;
DROP INDEX IF EXISTS public.issues_context_string_gin_idx;
ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;
-- To migrate saved search data
-- SET client_min_messages TO NOTICE;
CREATE OR REPLACE FUNCTION get_new_event_key(key text)
RETURNS text AS
$$
DECLARE
events_map CONSTANT JSONB := '{
"CLICK": "click",
"INPUT": "input",
"LOCATION": "location",
"CUSTOM": "custom",
"REQUEST": "request",
"FETCH": "fetch",
"GRAPHQL": "graphql",
"STATEACTION": "stateAction",
"ERROR": "error",
"CLICK_IOS": "clickIos",
"INPUT_IOS": "inputIos",
"VIEW_IOS": "viewIos",
"CUSTOM_IOS": "customIos",
"REQUEST_IOS": "requestIos",
"ERROR_IOS": "errorIos",
"DOM_COMPLETE": "domComplete",
"LARGEST_CONTENTFUL_PAINT_TIME": "largestContentfulPaintTime",
"TIME_BETWEEN_EVENTS": "timeBetweenEvents",
"TTFB": "ttfb",
"AVG_CPU_LOAD": "avgCpuLoad",
"AVG_MEMORY_USAGE": "avgMemoryUsage",
"FETCH_FAILED": "fetchFailed"
}';
BEGIN
RETURN jsonb_extract_path(events_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
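One subtlety worth spelling out (queries are illustrative): jsonb_extract_path returns a jsonb value, so the text this function hands back keeps its JSON quotes, which is why every caller below strips them with replace(new_value, '"', ''). Unmapped keys return NULL and are left alone:

SELECT get_new_event_key('CLICK');    -- "click" (still JSON-quoted)
SELECT get_new_event_key('UNKNOWN');  -- NULL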
CREATE OR REPLACE FUNCTION get_new_event_filter_key(key text)
RETURNS text AS
$$
DECLARE
event_filters_map CONSTANT JSONB := '{
"FETCH_URL": "fetchUrl",
"FETCH_STATUS_CODE": "fetchStatusCode",
"FETCH_METHOD": "fetchMethod",
"FETCH_DURATION": "fetchDuration",
"FETCH_REQUEST_BODY": "fetchRequestBody",
"FETCH_RESPONSE_BODY": "fetchResponseBody",
"GRAPHQL_NAME": "graphqlName",
"GRAPHQL_METHOD": "graphqlMethod",
"GRAPHQL_REQUEST_BODY": "graphqlRequestBody",
"GRAPHQL_RESPONSE_BODY": "graphqlResponseBody"
}';
BEGIN
RETURN jsonb_extract_path(event_filters_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
CREATE OR REPLACE FUNCTION get_new_filter_key(key text)
RETURNS text AS
$$
DECLARE
filters_map CONSTANT JSONB := '{
"USEROS": "userOs",
"USERBROWSER": "userBrowser",
"USERDEVICE": "userDevice",
"USERCOUNTRY": "userCountry",
"USERID": "userId",
"USERANONYMOUSID": "userAnonymousId",
"REFERRER": "referrer",
"REVID": "revId",
"USEROS_IOS": "userOsIos",
"USERDEVICE_IOS": "userDeviceIos",
"USERCOUNTRY_IOS": "userCountryIos",
"USERID_IOS": "userIdIos",
"USERANONYMOUSID_IOS": "userAnonymousIdIos",
"REVID_IOS": "revIdIos",
"DURATION": "duration",
"PLATFORM": "platform",
"METADATA": "metadata",
"ISSUE": "issue",
"EVENTS_COUNT": "eventsCount",
"UTM_SOURCE": "utmSource",
"UTM_MEDIUM": "utmMedium",
"UTM_CAMPAIGN": "utmCampaign"
}';
BEGIN
RETURN jsonb_extract_path(filters_map, key);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
DO
$$
DECLARE
row RECORD;
events_att JSONB;
event_filters_att JSONB;
filters_att JSONB;
element JSONB;
s_element JSONB;
new_value TEXT;
new_events JSONB[];
new_filters JSONB[];
new_event_filters JSONB[];
changed BOOLEAN;
planned_update JSONB[];
BEGIN
planned_update := '{}'::jsonb[];
FOR row IN SELECT * FROM searches
LOOP
            -- Reset per-row accumulators so values never carry over from the previous search
            new_events := '{}'::jsonb[];
            new_filters := '{}'::jsonb[];
            -- Transform events attributes
            events_att := row.filter -> 'events';
IF events_att IS NOT NULL THEN
new_events := '{}'::jsonb[];
FOR element IN SELECT jsonb_array_elements(events_att)
LOOP
changed := FALSE;
new_value := get_new_event_key(element ->> 'type');
                    IF new_value IS NOT NULL THEN
changed := TRUE;
new_value := replace(new_value, '"', '');
element := element || jsonb_build_object('type', new_value);
END IF;
-- Transform event's sub-filters attributes
event_filters_att := element -> 'filters';
new_event_filters := '{}'::jsonb[];
IF event_filters_att IS NOT NULL AND jsonb_array_length(event_filters_att) > 0 THEN
FOR s_element IN SELECT jsonb_array_elements(event_filters_att)
LOOP
new_value := get_new_event_filter_key(s_element ->> 'type');
                            IF new_value IS NOT NULL THEN
changed := TRUE;
new_value := replace(new_value, '"', '');
s_element := s_element || jsonb_build_object('type', new_value);
new_event_filters := array_append(new_event_filters, s_element);
END IF;
END LOOP;
element := element || jsonb_build_object('filters', new_event_filters);
END IF;
IF changed THEN
new_events := array_append(new_events, element);
END IF;
END LOOP;
IF array_length(new_events, 1) > 0 THEN
row.filter := row.filter || jsonb_build_object('events', new_events);
END IF;
END IF;
-- Transform filters attributes
filters_att := row.filter -> 'filters';
IF filters_att IS NOT NULL THEN
                new_filters := '{}'::jsonb[];
FOR element IN SELECT jsonb_array_elements(filters_att)
LOOP
new_value := get_new_filter_key(element ->> 'type');
                    IF new_value IS NOT NULL THEN
new_value := replace(new_value, '"', '');
element := element || jsonb_build_object('type', new_value);
new_filters := array_append(new_filters, element);
END IF;
END LOOP;
IF array_length(new_filters, 1) > 0 THEN
row.filter := row.filter || jsonb_build_object('filters', new_filters);
END IF;
END IF;
IF array_length(new_events, 1) > 0 OR array_length(new_filters, 1) > 0 THEN
planned_update := array_append(planned_update,
jsonb_build_object('id', row.search_id, 'change', row.filter));
END IF;
END LOOP;
-- Update saved search
IF array_length(planned_update, 1) > 0 THEN
            raise notice 'must update % elements', array_length(planned_update, 1);
UPDATE searches
SET filter=changes.change -> 'change'
FROM (SELECT unnest(planned_update)) AS changes(change)
WHERE search_id = (changes.change -> 'id')::integer;
raise notice 'update done';
ELSE
raise notice 'nothing to update';
END IF;
END;
$$
LANGUAGE plpgsql;
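To make the rewrite concrete, here is a sketch with an illustrative saved search (values are hypothetical; the key mappings come from the functions above). A stored filter such as

{"events": [{"type": "CLICK", "filters": [{"type": "FETCH_URL", "value": "/api"}]}], "filters": [{"type": "USERID", "value": "u1"}]}

is updated in place to

{"events": [{"type": "click", "filters": [{"type": "fetchUrl", "value": "/api"}]}], "filters": [{"type": "userId", "value": "u1"}]}

while searches whose keys are already in the new format are left untouched.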
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_idx ON events.clicks (path);
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_path_gin_idx ON events.clicks USING GIN (path gin_trgm_ops);
CREATE INDEX CONCURRENTLY IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
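A note for anyone adapting this file: CREATE INDEX CONCURRENTLY cannot run inside a transaction block, which is why these four statements come after the COMMIT rather than inside the BEGIN ... COMMIT above. Wrapping one in a transaction fails, roughly like this (hypothetical session):

BEGIN;
CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);
-- ERROR: CREATE INDEX CONCURRENTLY cannot run inside a transaction block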

View file

@ -67,6 +67,13 @@ DROP INDEX IF EXISTS events_common.requests_url_gin_idx2;
DROP INDEX IF EXISTS events.resources_url_gin_idx;
DROP INDEX IF EXISTS events.resources_url_idx;
UPDATE metrics
SET default_config=default_config || '{
"col": 4
}'::jsonb
WHERE NOT is_predefined
AND (metric_type = 'funnel' OR (metric_type = 'table' AND metric_of IN ('SESSIONS', 'js_exception')));
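Since || on jsonb merges top-level keys with the right-hand side winning, this widens the matching widgets to four columns while preserving their row and position (illustrative values):

SELECT '{"col": 2, "row": 2, "position": 0}'::jsonb || '{"col": 4}'::jsonb;
-- {"col": 4, "row": 2, "position": 0}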
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;

View file

@ -6,10 +6,9 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.9.0'
SELECT 'v1.10.0'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS
$$
@ -29,7 +28,6 @@ begin
end;
$$ LANGUAGE plpgsql;
-- --- events.sql ---
CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS
$$
@ -54,7 +52,6 @@ BEGIN
END;
$$ LANGUAGE plpgsql IMMUTABLE;
-- --- integrations.sql ---
CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
@ -70,7 +67,6 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
-- --- alerts.sql ---
CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS
$$
@ -87,7 +83,6 @@ BEGIN
END ;
$$ LANGUAGE plpgsql;
-- --- projects.sql ---
CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS
$$
@ -110,11 +105,9 @@ $$
ELSE
raise notice 'Creating DB';
-- --- public.sql ---
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- --- accounts.sql ---
CREATE TABLE tenants
(
@ -141,9 +134,9 @@ $$
email text NOT NULL UNIQUE,
role user_role NOT NULL DEFAULT 'member',
name text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
api_key text UNIQUE default generate_api_key(20) not null,
api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
jwt_iat timestamp without time zone NULL DEFAULT NULL,
data jsonb NOT NULL DEFAULT '{}'::jsonb,
weekly_report boolean NOT NULL DEFAULT TRUE
@ -171,7 +164,6 @@ $$
);
CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
-- --- projects.sql ---
CREATE TABLE projects
(
@ -201,7 +193,8 @@ $$
"defaultInputMode": "plain"
}'::jsonb,
first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL,
beacon_size integer NOT NULL DEFAULT 0
);
CREATE INDEX projects_project_key_idx ON public.projects (project_key);
@ -214,25 +207,22 @@ $$
EXECUTE PROCEDURE notify_project();
-- --- webhooks.sql ---
CREATE TYPE webhook_type AS ENUM ('webhook', 'slack', 'email', 'msteams');
create type webhook_type as enum ('webhook', 'slack', 'email');
create table webhooks
CREATE TABLE webhooks
(
webhook_id integer generated by default as identity
webhook_id integer generated by DEFAULT as identity
constraint webhooks_pkey
primary key,
endpoint text not null,
created_at timestamp default timezone('utc'::text, now()) not null,
endpoint text NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
auth_header text,
type webhook_type not null,
index integer default 0 not null,
type webhook_type NOT NULL DEFAULT 'webhook',
index integer DEFAULT 0 NOT NULL,
name varchar(100)
);
-- --- notifications.sql ---
CREATE TABLE notifications
(
@ -258,16 +248,15 @@ $$
constraint user_viewed_notifications_pkey primary key (user_id, notification_id)
);
-- --- funnels.sql ---
CREATE TABLE funnels
(
funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text not null,
filter jsonb not null,
created_at timestamp default timezone('utc'::text, now()) not null,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
@ -275,25 +264,23 @@ $$
CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
-- --- announcements.sql ---
create type announcement_type as enum ('notification', 'alert');
CREATE TYPE announcement_type AS ENUM ('notification', 'alert');
create table announcements
CREATE TABLE announcements
(
announcement_id serial not null
announcement_id serial NOT NULL
constraint announcements_pk
primary key,
title text not null,
description text not null,
title text NOT NULL,
description text NOT NULL,
button_text varchar(30),
button_url text,
image_url text,
created_at timestamp default timezone('utc'::text, now()) not null,
type announcement_type default 'notification'::announcement_type not null
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
type announcement_type DEFAULT 'notification'::announcement_type NOT NULL
);
-- --- integrations.sql ---
CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github');
CREATE TABLE integrations
@ -312,20 +299,19 @@ $$
EXECUTE PROCEDURE notify_integration();
create table jira_cloud
CREATE TABLE jira_cloud
(
user_id integer not null
user_id integer NOT NULL
constraint jira_cloud_pk
primary key
constraint jira_cloud_users_fkey
references users
on delete cascade,
username text not null,
token text not null,
username text NOT NULL,
token text NOT NULL,
url text
);
-- --- issues.sql ---
CREATE TYPE issue_type AS ENUM (
'click_rage',
@ -358,10 +344,9 @@ $$
context jsonb DEFAULT NULL
);
CREATE INDEX issues_issue_id_type_idx ON issues (issue_id, type);
CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
CREATE INDEX issues_project_id_idx ON issues (project_id);
-- --- errors.sql ---
CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch');
CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored');
@ -406,7 +391,6 @@ $$
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
-- --- sessions.sql ---
CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR');
CREATE TYPE platform AS ENUM ('web','ios','android');
@ -485,12 +469,9 @@ $$
CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops);
CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops);
CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops);
CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops);
CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops);
CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops);
CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops);
CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops);
CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country);
CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0;
CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0;
CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0;
@ -532,21 +513,18 @@ $$
);
CREATE INDEX user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id);
-- --- assignments.sql ---
create table assigned_sessions
CREATE TABLE assigned_sessions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
issue_id text NOT NULL,
provider oauth_provider NOT NULL,
created_by integer NOT NULL,
created_at timestamp default timezone('utc'::text, now()) NOT NULL,
provider_data jsonb default '{}'::jsonb NOT NULL
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
provider_data jsonb DEFAULT '{}'::jsonb NOT NULL
);
CREATE INDEX assigned_sessions_session_id_idx ON assigned_sessions (session_id);
-- --- events_common.sql ---
CREATE TYPE events_common.custom_level AS ENUM ('info','error');
@ -604,14 +582,11 @@ $$
CREATE INDEX requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE INDEX requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL;
CREATE INDEX requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;
CREATE INDEX requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;
CREATE INDEX requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL;
CREATE INDEX requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
-- --- events.sql ---
CREATE TABLE events.pages
(
@ -641,14 +616,6 @@ $$
CREATE INDEX pages_timestamp_idx ON events.pages (timestamp);
CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer);
CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
length(base_referrer) - (CASE
WHEN base_referrer LIKE 'http://%'
THEN 7
WHEN base_referrer LIKE 'https://%'
THEN 8
ELSE 0 END))
gin_trgm_ops);
CREATE INDEX pages_response_time_idx ON events.pages (response_time);
CREATE INDEX pages_response_end_idx ON events.pages (response_end);
CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
@ -683,6 +650,7 @@ $$
timestamp bigint NOT NULL,
label text DEFAULT NULL,
url text DEFAULT '' NOT NULL,
path text,
selector text DEFAULT '' NOT NULL,
PRIMARY KEY (session_id, message_id)
);
@ -692,10 +660,11 @@ $$
CREATE INDEX clicks_timestamp_idx ON events.clicks (timestamp);
CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp);
CREATE INDEX clicks_url_idx ON events.clicks (url);
CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops);
CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector);
CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
CREATE INDEX clicks_selector_idx ON events.clicks (selector);
CREATE INDEX clicks_path_idx ON events.clicks (path);
CREATE INDEX clicks_path_gin_idx ON events.clicks USING GIN (path gin_trgm_ops);
CREATE TABLE events.inputs
(
@ -707,9 +676,7 @@ $$
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX inputs_session_id_idx ON events.inputs (session_id);
CREATE INDEX inputs_label_value_idx ON events.inputs (label, value);
CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops);
CREATE INDEX inputs_label_idx ON events.inputs (label);
CREATE INDEX inputs_timestamp_idx ON events.inputs (timestamp);
CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp);
@ -771,7 +738,6 @@ $$
name text NOT NULL,
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX state_actions_name_idx ON events.state_actions (name);
CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
CREATE INDEX state_actions_timestamp_idx ON events.state_actions (timestamp);
@ -799,17 +765,12 @@ $$
CREATE INDEX resources_session_id_idx ON events.resources (session_id);
CREATE INDEX resources_status_idx ON events.resources (status);
CREATE INDEX resources_type_idx ON events.resources (type);
CREATE INDEX resources_duration_durationgt0_idx ON events.resources (duration) WHERE duration > 0;
CREATE INDEX resources_url_host_idx ON events.resources (url_host);
CREATE INDEX resources_timestamp_idx ON events.resources (timestamp);
CREATE INDEX resources_success_idx ON events.resources (success);
CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
CREATE INDEX resources_url_idx ON events.resources (url);
CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath);
CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp);
CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
@ -847,8 +808,6 @@ $$
CREATE INDEX performance_avg_used_js_heap_size_gt0_idx ON events.performance (avg_used_js_heap_size) WHERE avg_used_js_heap_size > 0;
-- --- autocomplete.sql ---
CREATE TABLE autocomplete
(
value text NOT NULL,
@ -887,8 +846,8 @@ $$
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
action job_action NOT NULL,
reference_id text NOT NULL,
created_at timestamp default timezone('utc'::text, now()) NOT NULL,
updated_at timestamp default timezone('utc'::text, now()) NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at timestamp DEFAULT timezone('utc'::text, now()) NULL,
start_at timestamp NOT NULL,
errors text NULL
);
@ -896,37 +855,27 @@ $$
CREATE INDEX jobs_start_at_idx ON jobs (start_at);
CREATE INDEX jobs_project_id_idx ON jobs (project_id);
CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined', 'funnel');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map');
CREATE TABLE metrics
(
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT FALSE,
active boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp,
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type metric_type NOT NULL DEFAULT 'timeseries',
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',
metric_value text[] NOT NULL DEFAULT '{}'::text[],
edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
metric_type text NOT NULL DEFAULT 'timeseries',
view_type text NOT NULL DEFAULT 'lineChart',
metric_of text NOT NULL DEFAULT 'sessionCount',
metric_value text[] NOT NULL DEFAULT '{}'::text[],
metric_format text,
category text NULL DEFAULT 'custom',
is_pinned boolean NOT NULL DEFAULT FALSE,
is_predefined boolean NOT NULL DEFAULT FALSE,
is_template boolean NOT NULL DEFAULT FALSE,
predefined_key text NULL DEFAULT NULL,
default_config jsonb NOT NULL DEFAULT '{
thumbnail text,
default_config jsonb NOT NULL DEFAULT '{
"col": 2,
"row": 2,
"position": 0
}'::jsonb,
CONSTRAINT null_project_id_for_template_only
CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ),
CONSTRAINT unique_key UNIQUE (predefined_key)
}'::jsonb
);
CREATE INDEX metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
@ -971,9 +920,9 @@ $$
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
name text not null,
filter jsonb not null,
created_at timestamp default timezone('utc'::text, now()) not null,
name text NOT NULL,
filter jsonb NOT NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at timestamp,
is_public boolean NOT NULL DEFAULT False
);
@ -1013,7 +962,7 @@ $$
(
note_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
message text NOT NULL,
created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
deleted_at timestamp without time zone NULL DEFAULT NULL,
tag text NULL,
@ -1030,244 +979,4 @@ $$
$$
LANGUAGE plpgsql;
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
view_type)
VALUES ('Captured sessions', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_sessions', 'predefined', 'overview'),
('Request Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
('Page Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
('Image Load Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
('DOM Content Load Start', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
('First Meaningful paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
('No. of Visited Pages', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
('Session Duration', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
('DOM Build Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
('Pages Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
('Response Time', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
('First Paint', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
('DOM Content Loaded', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
('Time Till First byte', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
('Time To Interactive', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
('Captured requests', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'count_requests', 'predefined', 'overview'),
('Time To Render', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
('Memory Consumption', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
('CPU Load', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
('Frame rate', 'web vitals', '{
"col": 1,
"row": 1,
"position": 0
}', true, true, true, 'avg_fps', 'predefined', 'overview'),
('Sessions Affected by JS Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
('Top Domains with 4xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
('Top Domains with 5xx Fetch Errors', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
('Errors per Domain', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
('Fetch Calls with Errors', 'errors', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'calls_errors', 'predefined', 'table'),
('Errors by Type', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
('Errors by Origin', 'errors', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
('Speed Index by Location', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'speed_location', 'predefined', 'map'),
('Slowest Domains', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'slowest_domains', 'predefined', 'table'),
('Sessions per Browser', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
('Time To Render', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
('Sessions Impacted by Slow Pages', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
('Memory Consumption', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
('CPU Load', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'cpu', 'predefined', 'areaChart'),
('Frame Rate', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'fps', 'predefined', 'areaChart'),
('Crashes', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'crashes', 'predefined', 'areaChart'),
('Resources Loaded vs Visually Complete', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
('DOM Build Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
('Pages Response Time', 'performance', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
('Pages Response Time Distribution', 'performance', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
('Missing Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'missing_resources', 'predefined', 'table'),
('Slowest Resources', 'resources', '{
"col": 4,
"row": 2,
"position": 0
}', true, true, true, 'slowest_resources', 'predefined', 'table'),
('Resources Fetch Time', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
('Resource Loaded vs Response End', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
('Breakdown of Loaded Resources', 'resources', '{
"col": 2,
"row": 2,
"position": 0
}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
ON CONFLICT (predefined_key) DO UPDATE
SET name=excluded.name,
category=excluded.category,
default_config=excluded.default_config,
is_predefined=excluded.is_predefined,
is_template=excluded.is_template,
is_public=excluded.is_public,
metric_type=excluded.metric_type,
view_type=excluded.view_type;
COMMIT;

View file

@ -9,9 +9,8 @@
"version": "1.0.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"aws-sdk": "^2.1172.0",
"express": "^4.18.1",
"request": "^2.88.2",
"aws-sdk": "^2.1284.0",
"express": "^4.18.2",
"source-map": "^0.7.4"
}
},
@ -27,47 +26,11 @@
"node": ">= 0.6"
}
},
"node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.4.1",
"uri-js": "^4.2.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
},
"node_modules/asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
"integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
"dependencies": {
"safer-buffer": "~2.1.0"
}
},
"node_modules/assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==",
"engines": {
"node": ">=0.8"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/available-typed-arrays": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz",
@ -80,9 +43,9 @@
}
},
"node_modules/aws-sdk": {
"version": "2.1262.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1262.0.tgz",
"integrity": "sha512-XbaK/XUIxwLEBnHANhJ0RTZtiU288lFRj5FllSihQ5Kb0fibKyW8kJFPsY+NzzDezLH5D3WdGbTKb9fycn5TbA==",
"version": "2.1284.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1284.0.tgz",
"integrity": "sha512-B9NllAw1kMMPUHpSs4OcUm0xK1el0tNU2qmIVHtRPRbOFT8cQfxy4HF8s2m0ddvMF1ma4tdzB5uNUNcu3c81ag==",
"dependencies": {
"buffer": "4.9.2",
"events": "1.1.1",
@ -99,19 +62,6 @@
"node": ">= 10.0.0"
}
},
"node_modules/aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
"integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==",
"engines": {
"node": "*"
}
},
"node_modules/aws4": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
"integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA=="
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
@ -131,14 +81,6 @@
}
]
},
"node_modules/bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
"dependencies": {
"tweetnacl": "^0.14.3"
}
},
"node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
@ -192,22 +134,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/content-disposition": {
"version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
@ -240,22 +166,6 @@
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
"integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
},
"node_modules/core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
},
"node_modules/dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==",
"dependencies": {
"assert-plus": "^1.0.0"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@ -264,14 +174,6 @@
"ms": "2.0.0"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
@ -289,15 +191,6 @@
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
"dependencies": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
}
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
@ -373,29 +266,6 @@
"node": ">= 0.10.0"
}
},
"node_modules/extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
"node_modules/extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
"integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==",
"engines": [
"node >=0.6.0"
]
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
},
"node_modules/fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"node_modules/finalhandler": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",
@ -421,27 +291,6 @@
"is-callable": "^1.1.3"
}
},
"node_modules/forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
"integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==",
"engines": {
"node": "*"
}
},
"node_modules/form-data": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 0.12"
}
},
"node_modules/forwarded": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@ -476,14 +325,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/getpass": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==",
"dependencies": {
"assert-plus": "^1.0.0"
}
},
"node_modules/gopd": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
@ -495,27 +336,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/har-schema": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
"integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==",
"engines": {
"node": ">=4"
}
},
"node_modules/har-validator": {
"version": "5.1.5",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
"integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
"deprecated": "this library is no longer supported",
"dependencies": {
"ajv": "^6.12.3",
"har-schema": "^2.0.0"
},
"engines": {
"node": ">=6"
}
},
"node_modules/has": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
@ -567,20 +387,6 @@
"node": ">= 0.8"
}
},
"node_modules/http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
"dependencies": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
"sshpk": "^1.7.0"
},
"engines": {
"node": ">=0.8",
"npm": ">=1.3.7"
}
},
"node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
@ -668,21 +474,11 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
"integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA=="
},
"node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
},
"node_modules/isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
},
"node_modules/jmespath": {
"version": "0.16.0",
"resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
@ -691,40 +487,6 @@
"node": ">= 0.6.0"
}
},
"node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
},
"node_modules/json-schema": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
"integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="
},
"node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
},
"node_modules/json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
"integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
},
"node_modules/jsprim": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
"integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==",
"dependencies": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
"json-schema": "0.4.0",
"verror": "1.10.0"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@ -789,14 +551,6 @@
"node": ">= 0.6"
}
},
"node_modules/oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==",
"engines": {
"node": "*"
}
},
"node_modules/object-inspect": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
@ -829,11 +583,6 @@
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"node_modules/performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
"integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="
},
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@ -846,18 +595,10 @@
"node": ">= 0.10"
}
},
"node_modules/psl": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz",
"integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag=="
},
"node_modules/punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
"engines": {
"node": ">=6"
}
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
"integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="
},
"node_modules/qs": {
"version": "6.11.0",
@ -904,54 +645,6 @@
"node": ">= 0.8"
}
},
"node_modules/request": {
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
"dependencies": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.6",
"extend": "~3.0.2",
"forever-agent": "~0.6.1",
"form-data": "~2.3.2",
"har-validator": "~5.1.3",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"oauth-sign": "~0.9.0",
"performance-now": "^2.1.0",
"qs": "~6.5.2",
"safe-buffer": "^5.1.2",
"tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0",
"uuid": "^3.3.2"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/request/node_modules/qs": {
"version": "6.5.3",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
"integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==",
"engines": {
"node": ">=0.6"
}
},
"node_modules/request/node_modules/uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
"deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
"bin": {
"uuid": "bin/uuid"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@ -1049,30 +742,6 @@
"node": ">= 8"
}
},
"node_modules/sshpk": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz",
"integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==",
"dependencies": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
"bcrypt-pbkdf": "^1.0.0",
"dashdash": "^1.12.0",
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
},
"bin": {
"sshpk-conv": "bin/sshpk-conv",
"sshpk-sign": "bin/sshpk-sign",
"sshpk-verify": "bin/sshpk-verify"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@ -1089,34 +758,6 @@
"node": ">=0.6"
}
},
"node_modules/tough-cookie": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"dependencies": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
"dependencies": {
"safe-buffer": "^5.0.1"
},
"engines": {
"node": "*"
}
},
"node_modules/tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
@ -1137,14 +778,6 @@
"node": ">= 0.8"
}
},
"node_modules/uri-js": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
"dependencies": {
"punycode": "^2.1.0"
}
},
"node_modules/url": {
"version": "0.10.3",
"resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
@ -1154,11 +787,6 @@
"querystring": "0.2.0"
}
},
"node_modules/url/node_modules/punycode": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
"integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="
},
"node_modules/util": {
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
@ -1195,19 +823,6 @@
"node": ">= 0.8"
}
},
"node_modules/verror": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
"engines": [
"node >=0.6.0"
],
"dependencies": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
"extsprintf": "^1.2.0"
}
},
"node_modules/which-typed-array": {
"version": "1.1.9",
"resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz",
Some files were not shown because too many files have changed in this diff.